pytest: use consistent way of creating a fixture by using pytest.fixture()
marcink
r3946:39fb0295 default
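For context, a minimal sketch of the two equivalent decorator spellings this commit unifies; the fixture names below are illustrative and not taken from the changed files:

import pytest


@pytest.fixture
def http_host_stub_bare():
    # Bare decorator form (the older spelling used in some of these tests).
    return 'localhost:80'


@pytest.fixture()
def http_host_stub_called():
    # Called form with empty parentheses, the spelling this commit standardizes on;
    # with no arguments it behaves exactly like the bare form.
    return 'localhost:80'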
@@ -1,84 +1,84 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import pytest

from rhodecode.model.scm import ScmModel
from rhodecode.api.tests.utils import build_data, api_call, assert_ok


-@pytest.fixture
+@pytest.fixture()
def http_host_stub():
    """
    To ensure that we can get an IP address, this test shall run with a
    hostname set to "localhost".
    """
    return 'localhost:80'


@pytest.mark.usefixtures("testuser_api", "app")
class TestGetServerInfo(object):
    def test_api_get_server_info(self):
        id_, params = build_data(self.apikey, 'get_server_info')
        response = api_call(self.app, params)
        resp = response.json
        expected = ScmModel().get_server_info()
        expected['memory'] = resp['result']['memory']
        expected['uptime'] = resp['result']['uptime']
        expected['load'] = resp['result']['load']
        expected['cpu'] = resp['result']['cpu']
        expected['storage'] = resp['result']['storage']
        expected['storage_temp'] = resp['result']['storage_temp']
        expected['storage_inodes'] = resp['result']['storage_inodes']
        expected['server'] = resp['result']['server']

        expected['index_storage'] = resp['result']['index_storage']
        expected['storage'] = resp['result']['storage']

        assert_ok(id_, expected, given=response.body)

    def test_api_get_server_info_ip(self):
        id_, params = build_data(self.apikey, 'get_server_info')
        response = api_call(self.app, params)
        resp = response.json
        expected = ScmModel().get_server_info({'SERVER_NAME': 'unknown'})
        expected['memory'] = resp['result']['memory']
        expected['uptime'] = resp['result']['uptime']
        expected['load'] = resp['result']['load']
        expected['cpu'] = resp['result']['cpu']
        expected['storage'] = resp['result']['storage']
        expected['storage_temp'] = resp['result']['storage_temp']
        expected['storage_inodes'] = resp['result']['storage_inodes']
        expected['server'] = resp['result']['server']

        expected['index_storage'] = resp['result']['index_storage']
        expected['storage'] = resp['result']['storage']

        assert_ok(id_, expected, given=response.body)

    def test_api_get_server_info_data_for_search_index_build(self):
        id_, params = build_data(self.apikey, 'get_server_info')
        response = api_call(self.app, params)
        resp = response.json

        # required by indexer
        assert resp['result']['index_storage']
        assert resp['result']['storage']
@@ -1,743 +1,743 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import mock
import pytest

import rhodecode
from rhodecode.apps._base import ADMIN_PREFIX
from rhodecode.lib.utils2 import md5
from rhodecode.model.db import RhodeCodeUi
from rhodecode.model.meta import Session
from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel
from rhodecode.tests import assert_session_flash
from rhodecode.tests.utils import AssertResponse


UPDATE_DATA_QUALNAME = 'rhodecode.model.update.UpdateModel.get_update_data'


def route_path(name, params=None, **kwargs):
    import urllib
    from rhodecode.apps._base import ADMIN_PREFIX

    base_url = {

        'admin_settings':
            ADMIN_PREFIX +'/settings',
        'admin_settings_update':
            ADMIN_PREFIX + '/settings/update',
        'admin_settings_global':
            ADMIN_PREFIX + '/settings/global',
        'admin_settings_global_update':
            ADMIN_PREFIX + '/settings/global/update',
        'admin_settings_vcs':
            ADMIN_PREFIX + '/settings/vcs',
        'admin_settings_vcs_update':
            ADMIN_PREFIX + '/settings/vcs/update',
        'admin_settings_vcs_svn_pattern_delete':
            ADMIN_PREFIX + '/settings/vcs/svn_pattern_delete',
        'admin_settings_mapping':
            ADMIN_PREFIX + '/settings/mapping',
        'admin_settings_mapping_update':
            ADMIN_PREFIX + '/settings/mapping/update',
        'admin_settings_visual':
            ADMIN_PREFIX + '/settings/visual',
        'admin_settings_visual_update':
            ADMIN_PREFIX + '/settings/visual/update',
        'admin_settings_issuetracker':
            ADMIN_PREFIX + '/settings/issue-tracker',
        'admin_settings_issuetracker_update':
            ADMIN_PREFIX + '/settings/issue-tracker/update',
        'admin_settings_issuetracker_test':
            ADMIN_PREFIX + '/settings/issue-tracker/test',
        'admin_settings_issuetracker_delete':
            ADMIN_PREFIX + '/settings/issue-tracker/delete',
        'admin_settings_email':
            ADMIN_PREFIX + '/settings/email',
        'admin_settings_email_update':
            ADMIN_PREFIX + '/settings/email/update',
        'admin_settings_hooks':
            ADMIN_PREFIX + '/settings/hooks',
        'admin_settings_hooks_update':
            ADMIN_PREFIX + '/settings/hooks/update',
        'admin_settings_hooks_delete':
            ADMIN_PREFIX + '/settings/hooks/delete',
        'admin_settings_search':
            ADMIN_PREFIX + '/settings/search',
        'admin_settings_labs':
            ADMIN_PREFIX + '/settings/labs',
        'admin_settings_labs_update':
            ADMIN_PREFIX + '/settings/labs/update',

        'admin_settings_sessions':
            ADMIN_PREFIX + '/settings/sessions',
        'admin_settings_sessions_cleanup':
            ADMIN_PREFIX + '/settings/sessions/cleanup',
        'admin_settings_system':
            ADMIN_PREFIX + '/settings/system',
        'admin_settings_system_update':
            ADMIN_PREFIX + '/settings/system/updates',
        'admin_settings_open_source':
            ADMIN_PREFIX + '/settings/open_source',


    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url


@pytest.mark.usefixtures('autologin_user', 'app')
class TestAdminSettingsController(object):

    @pytest.mark.parametrize('urlname', [
        'admin_settings_vcs',
        'admin_settings_mapping',
        'admin_settings_global',
        'admin_settings_visual',
        'admin_settings_email',
        'admin_settings_hooks',
        'admin_settings_search',
    ])
    def test_simple_get(self, urlname):
        self.app.get(route_path(urlname))

    def test_create_custom_hook(self, csrf_token):
        response = self.app.post(
            route_path('admin_settings_hooks_update'),
            params={
                'new_hook_ui_key': 'test_hooks_1',
                'new_hook_ui_value': 'cd /tmp',
                'csrf_token': csrf_token})

        response = response.follow()
        response.mustcontain('test_hooks_1')
        response.mustcontain('cd /tmp')

    def test_create_custom_hook_delete(self, csrf_token):
        response = self.app.post(
            route_path('admin_settings_hooks_update'),
            params={
                'new_hook_ui_key': 'test_hooks_2',
                'new_hook_ui_value': 'cd /tmp2',
                'csrf_token': csrf_token})

        response = response.follow()
        response.mustcontain('test_hooks_2')
        response.mustcontain('cd /tmp2')

        hook_id = SettingsModel().get_ui_by_key('test_hooks_2').ui_id

        # delete
        self.app.post(
            route_path('admin_settings_hooks_delete'),
            params={'hook_id': hook_id, 'csrf_token': csrf_token})
        response = self.app.get(route_path('admin_settings_hooks'))
        response.mustcontain(no=['test_hooks_2'])
        response.mustcontain(no=['cd /tmp2'])


@pytest.mark.usefixtures('autologin_user', 'app')
class TestAdminSettingsGlobal(object):

    def test_pre_post_code_code_active(self, csrf_token):
        pre_code = 'rc-pre-code-187652122'
        post_code = 'rc-postcode-98165231'

        response = self.post_and_verify_settings({
            'rhodecode_pre_code': pre_code,
            'rhodecode_post_code': post_code,
            'csrf_token': csrf_token,
        })

        response = response.follow()
        response.mustcontain(pre_code, post_code)

    def test_pre_post_code_code_inactive(self, csrf_token):
        pre_code = 'rc-pre-code-187652122'
        post_code = 'rc-postcode-98165231'
        response = self.post_and_verify_settings({
            'rhodecode_pre_code': '',
            'rhodecode_post_code': '',
            'csrf_token': csrf_token,
        })

        response = response.follow()
        response.mustcontain(no=[pre_code, post_code])

    def test_captcha_activate(self, csrf_token):
        self.post_and_verify_settings({
            'rhodecode_captcha_private_key': '1234567890',
            'rhodecode_captcha_public_key': '1234567890',
            'csrf_token': csrf_token,
        })

        response = self.app.get(ADMIN_PREFIX + '/register')
        response.mustcontain('captcha')

    def test_captcha_deactivate(self, csrf_token):
        self.post_and_verify_settings({
            'rhodecode_captcha_private_key': '',
            'rhodecode_captcha_public_key': '1234567890',
            'csrf_token': csrf_token,
        })

        response = self.app.get(ADMIN_PREFIX + '/register')
        response.mustcontain(no=['captcha'])

    def test_title_change(self, csrf_token):
        old_title = 'RhodeCode'

        for new_title in ['Changed', 'Żółwik', old_title]:
            response = self.post_and_verify_settings({
                'rhodecode_title': new_title,
                'csrf_token': csrf_token,
            })

            response = response.follow()
            response.mustcontain(new_title)

    def post_and_verify_settings(self, settings):
        old_title = 'RhodeCode'
        old_realm = 'RhodeCode authentication'
        params = {
            'rhodecode_title': old_title,
            'rhodecode_realm': old_realm,
            'rhodecode_pre_code': '',
            'rhodecode_post_code': '',
            'rhodecode_captcha_private_key': '',
            'rhodecode_captcha_public_key': '',
            'rhodecode_create_personal_repo_group': False,
            'rhodecode_personal_repo_group_pattern': '${username}',
        }
        params.update(settings)
        response = self.app.post(
            route_path('admin_settings_global_update'), params=params)

        assert_session_flash(response, 'Updated application settings')
        app_settings = SettingsModel().get_all_settings()
        del settings['csrf_token']
        for key, value in settings.iteritems():
            assert app_settings[key] == value.decode('utf-8')

        return response


@pytest.mark.usefixtures('autologin_user', 'app')
class TestAdminSettingsVcs(object):

    def test_contains_svn_default_patterns(self):
        response = self.app.get(route_path('admin_settings_vcs'))
        expected_patterns = [
            '/trunk',
            '/branches/*',
            '/tags/*',
        ]
        for pattern in expected_patterns:
            response.mustcontain(pattern)

    def test_add_new_svn_branch_and_tag_pattern(
            self, backend_svn, form_defaults, disable_sql_cache,
            csrf_token):
        form_defaults.update({
            'new_svn_branch': '/exp/branches/*',
            'new_svn_tag': '/important_tags/*',
            'csrf_token': csrf_token,
        })

        response = self.app.post(
            route_path('admin_settings_vcs_update'),
            params=form_defaults, status=302)
        response = response.follow()

        # Expect to find the new values on the page
        response.mustcontain('/exp/branches/*')
        response.mustcontain('/important_tags/*')

        # Expect that those patterns are used to match branches and tags now
        repo = backend_svn['svn-simple-layout'].scm_instance()
        assert 'exp/branches/exp-sphinx-docs' in repo.branches
        assert 'important_tags/v0.5' in repo.tags

    def test_add_same_svn_value_twice_shows_an_error_message(
            self, form_defaults, csrf_token, settings_util):
        settings_util.create_rhodecode_ui('vcs_svn_branch', '/test')
        settings_util.create_rhodecode_ui('vcs_svn_tag', '/test')

        response = self.app.post(
            route_path('admin_settings_vcs_update'),
            params={
                'paths_root_path': form_defaults['paths_root_path'],
                'new_svn_branch': '/test',
                'new_svn_tag': '/test',
                'csrf_token': csrf_token,
            },
            status=200)

        response.mustcontain("Pattern already exists")
        response.mustcontain("Some form inputs contain invalid data.")

    @pytest.mark.parametrize('section', [
        'vcs_svn_branch',
        'vcs_svn_tag',
    ])
    def test_delete_svn_patterns(
            self, section, csrf_token, settings_util):
        setting = settings_util.create_rhodecode_ui(
            section, '/test_delete', cleanup=False)

        self.app.post(
            route_path('admin_settings_vcs_svn_pattern_delete'),
            params={
                'delete_svn_pattern': setting.ui_id,
                'csrf_token': csrf_token},
            headers={'X-REQUESTED-WITH': 'XMLHttpRequest'})

    @pytest.mark.parametrize('section', [
        'vcs_svn_branch',
        'vcs_svn_tag',
    ])
    def test_delete_svn_patterns_raises_404_when_no_xhr(
            self, section, csrf_token, settings_util):
        setting = settings_util.create_rhodecode_ui(section, '/test_delete')

        self.app.post(
            route_path('admin_settings_vcs_svn_pattern_delete'),
            params={
                'delete_svn_pattern': setting.ui_id,
                'csrf_token': csrf_token},
            status=404)

    def test_extensions_hgsubversion(self, form_defaults, csrf_token):
        form_defaults.update({
            'csrf_token': csrf_token,
            'extensions_hgsubversion': 'True',
        })
        response = self.app.post(
            route_path('admin_settings_vcs_update'),
            params=form_defaults,
            status=302)

        response = response.follow()
        extensions_input = (
            '<input id="extensions_hgsubversion" '
            'name="extensions_hgsubversion" type="checkbox" '
            'value="True" checked="checked" />')
        response.mustcontain(extensions_input)

    def test_extensions_hgevolve(self, form_defaults, csrf_token):
        form_defaults.update({
            'csrf_token': csrf_token,
            'extensions_evolve': 'True',
        })
        response = self.app.post(
            route_path('admin_settings_vcs_update'),
            params=form_defaults,
            status=302)

        response = response.follow()
        extensions_input = (
            '<input id="extensions_evolve" '
            'name="extensions_evolve" type="checkbox" '
            'value="True" checked="checked" />')
        response.mustcontain(extensions_input)

    def test_has_a_section_for_pull_request_settings(self):
        response = self.app.get(route_path('admin_settings_vcs'))
        response.mustcontain('Pull Request Settings')

    def test_has_an_input_for_invalidation_of_inline_comments(self):
        response = self.app.get(route_path('admin_settings_vcs'))
        assert_response = AssertResponse(response)
        assert_response.one_element_exists(
            '[name=rhodecode_use_outdated_comments]')

    @pytest.mark.parametrize('new_value', [True, False])
    def test_allows_to_change_invalidation_of_inline_comments(
            self, form_defaults, csrf_token, new_value):
        setting_key = 'use_outdated_comments'
        setting = SettingsModel().create_or_update_setting(
            setting_key, not new_value, 'bool')
        Session().add(setting)
        Session().commit()

        form_defaults.update({
            'csrf_token': csrf_token,
            'rhodecode_use_outdated_comments': str(new_value),
        })
        response = self.app.post(
            route_path('admin_settings_vcs_update'),
            params=form_defaults,
            status=302)
        response = response.follow()
        setting = SettingsModel().get_setting_by_name(setting_key)
        assert setting.app_settings_value is new_value

    @pytest.mark.parametrize('new_value', [True, False])
    def test_allows_to_change_hg_rebase_merge_strategy(
            self, form_defaults, csrf_token, new_value):
        setting_key = 'hg_use_rebase_for_merging'

        form_defaults.update({
            'csrf_token': csrf_token,
            'rhodecode_' + setting_key: str(new_value),
        })

        with mock.patch.dict(
                rhodecode.CONFIG, {'labs_settings_active': 'true'}):
            self.app.post(
                route_path('admin_settings_vcs_update'),
                params=form_defaults,
                status=302)

        setting = SettingsModel().get_setting_by_name(setting_key)
        assert setting.app_settings_value is new_value

-    @pytest.fixture
+    @pytest.fixture()
    def disable_sql_cache(self, request):
        patcher = mock.patch(
            'rhodecode.lib.caching_query.FromCache.process_query')
        request.addfinalizer(patcher.stop)
        patcher.start()

-    @pytest.fixture
+    @pytest.fixture()
    def form_defaults(self):
        from rhodecode.apps.admin.views.settings import AdminSettingsView
        return AdminSettingsView._form_defaults()

    # TODO: johbo: What we really want is to checkpoint before a test run and
    # reset the session afterwards.
    @pytest.fixture(scope='class', autouse=True)
    def cleanup_settings(self, request, baseapp):
        ui_id = RhodeCodeUi.ui_id
        original_ids = list(
            r.ui_id for r in RhodeCodeUi.query().values(ui_id))

        @request.addfinalizer
        def cleanup():
            RhodeCodeUi.query().filter(
                ui_id.notin_(original_ids)).delete(False)


@pytest.mark.usefixtures('autologin_user', 'app')
class TestLabsSettings(object):
    def test_get_settings_page_disabled(self):
        with mock.patch.dict(
                rhodecode.CONFIG, {'labs_settings_active': 'false'}):

            response = self.app.get(
                route_path('admin_settings_labs'), status=302)

        assert response.location.endswith(route_path('admin_settings'))

    def test_get_settings_page_enabled(self):
        from rhodecode.apps.admin.views import settings
        lab_settings = [
            settings.LabSetting(
                key='rhodecode_bool',
                type='bool',
                group='bool group',
                label='bool label',
                help='bool help'
            ),
            settings.LabSetting(
                key='rhodecode_text',
                type='unicode',
                group='text group',
                label='text label',
                help='text help'
            ),
        ]
        with mock.patch.dict(rhodecode.CONFIG,
                             {'labs_settings_active': 'true'}):
            with mock.patch.object(settings, '_LAB_SETTINGS', lab_settings):
                response = self.app.get(route_path('admin_settings_labs'))

        assert '<label>bool group:</label>' in response
        assert '<label for="rhodecode_bool">bool label</label>' in response
        assert '<p class="help-block">bool help</p>' in response
        assert 'name="rhodecode_bool" type="checkbox"' in response

        assert '<label>text group:</label>' in response
        assert '<label for="rhodecode_text">text label</label>' in response
        assert '<p class="help-block">text help</p>' in response
        assert 'name="rhodecode_text" size="60" type="text"' in response


@pytest.mark.usefixtures('app')
class TestOpenSourceLicenses(object):

    def test_records_are_displayed(self, autologin_user):
        sample_licenses = [
            {
                "license": [
                    {
                        "fullName": "BSD 4-clause \"Original\" or \"Old\" License",
                        "shortName": "bsdOriginal",
                        "spdxId": "BSD-4-Clause",
                        "url": "http://spdx.org/licenses/BSD-4-Clause.html"
                    }
                ],
                "name": "python2.7-coverage-3.7.1"
            },
            {
                "license": [
                    {
                        "fullName": "MIT License",
                        "shortName": "mit",
                        "spdxId": "MIT",
                        "url": "http://spdx.org/licenses/MIT.html"
                    }
                ],
                "name": "python2.7-bootstrapped-pip-9.0.1"
            },
        ]
        read_licenses_patch = mock.patch(
            'rhodecode.apps.admin.views.open_source_licenses.read_opensource_licenses',
            return_value=sample_licenses)
        with read_licenses_patch:
            response = self.app.get(
                route_path('admin_settings_open_source'), status=200)

        assert_response = AssertResponse(response)
        assert_response.element_contains(
            '.panel-heading', 'Licenses of Third Party Packages')
        for license_data in sample_licenses:
            response.mustcontain(license_data["license"][0]["spdxId"])
            assert_response.element_contains('.panel-body', license_data["name"])

    def test_records_can_be_read(self, autologin_user):
        response = self.app.get(
            route_path('admin_settings_open_source'), status=200)
        assert_response = AssertResponse(response)
        assert_response.element_contains(
            '.panel-heading', 'Licenses of Third Party Packages')

    def test_forbidden_when_normal_user(self, autologin_regular_user):
        self.app.get(
            route_path('admin_settings_open_source'), status=404)


@pytest.mark.usefixtures('app')
class TestUserSessions(object):

    def test_forbidden_when_normal_user(self, autologin_regular_user):
        self.app.get(route_path('admin_settings_sessions'), status=404)

    def test_show_sessions_page(self, autologin_user):
        response = self.app.get(route_path('admin_settings_sessions'), status=200)
        response.mustcontain('file')

    def test_cleanup_old_sessions(self, autologin_user, csrf_token):

        post_data = {
            'csrf_token': csrf_token,
            'expire_days': '60'
        }
        response = self.app.post(
            route_path('admin_settings_sessions_cleanup'), params=post_data,
            status=302)
        assert_session_flash(response, 'Cleaned up old sessions')


@pytest.mark.usefixtures('app')
class TestAdminSystemInfo(object):

    def test_forbidden_when_normal_user(self, autologin_regular_user):
        self.app.get(route_path('admin_settings_system'), status=404)

    def test_system_info_page(self, autologin_user):
        response = self.app.get(route_path('admin_settings_system'))
        response.mustcontain('RhodeCode Community Edition, version {}'.format(
            rhodecode.__version__))

    def test_system_update_new_version(self, autologin_user):
        update_data = {
            'versions': [
                {
                    'version': '100.3.1415926535',
                    'general': 'The latest version we are ever going to ship'
                },
                {
                    'version': '0.0.0',
                    'general': 'The first version we ever shipped'
                }
            ]
        }
        with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
            response = self.app.get(route_path('admin_settings_system_update'))
            response.mustcontain('A <b>new version</b> is available')

    def test_system_update_nothing_new(self, autologin_user):
        update_data = {
            'versions': [
                {
                    'version': '0.0.0',
                    'general': 'The first version we ever shipped'
                }
            ]
        }
        with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
            response = self.app.get(route_path('admin_settings_system_update'))
            response.mustcontain(
                'This instance is already running the <b>latest</b> stable version')

    def test_system_update_bad_response(self, autologin_user):
        with mock.patch(UPDATE_DATA_QUALNAME, side_effect=ValueError('foo')):
            response = self.app.get(route_path('admin_settings_system_update'))
            response.mustcontain(
                'Bad data sent from update server')


@pytest.mark.usefixtures("app")
class TestAdminSettingsIssueTracker(object):
    RC_PREFIX = 'rhodecode_'
    SHORT_PATTERN_KEY = 'issuetracker_pat_'
    PATTERN_KEY = RC_PREFIX + SHORT_PATTERN_KEY

    def test_issuetracker_index(self, autologin_user):
        response = self.app.get(route_path('admin_settings_issuetracker'))
        assert response.status_code == 200

    def test_add_empty_issuetracker_pattern(
            self, request, autologin_user, csrf_token):
        post_url = route_path('admin_settings_issuetracker_update')
        post_data = {
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)

    def test_add_issuetracker_pattern(
            self, request, autologin_user, csrf_token):
        pattern = 'issuetracker_pat'
        another_pattern = pattern+'1'
        post_url = route_path('admin_settings_issuetracker_update')
        post_data = {
            'new_pattern_pattern_0': pattern,
            'new_pattern_url_0': 'http://url',
            'new_pattern_prefix_0': 'prefix',
            'new_pattern_description_0': 'description',
            'new_pattern_pattern_1': another_pattern,
            'new_pattern_url_1': 'https://url1',
            'new_pattern_prefix_1': 'prefix1',
            'new_pattern_description_1': 'description1',
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        settings = SettingsModel().get_all_settings()
        self.uid = md5(pattern)
        assert settings[self.PATTERN_KEY+self.uid] == pattern
        self.another_uid = md5(another_pattern)
        assert settings[self.PATTERN_KEY+self.another_uid] == another_pattern

        @request.addfinalizer
        def cleanup():
            defaults = SettingsModel().get_all_settings()

            entries = [name for name in defaults if (
                (self.uid in name) or (self.another_uid) in name)]
            start = len(self.RC_PREFIX)
            for del_key in entries:
                # TODO: anderson: get_by_name needs name without prefix
                entry = SettingsModel().get_setting_by_name(del_key[start:])
                Session().delete(entry)

            Session().commit()

    def test_edit_issuetracker_pattern(
            self, autologin_user, backend, csrf_token, request):
        old_pattern = 'issuetracker_pat'
        old_uid = md5(old_pattern)
        pattern = 'issuetracker_pat_new'
        self.new_uid = md5(pattern)

        SettingsModel().create_or_update_setting(
            self.SHORT_PATTERN_KEY+old_uid, old_pattern, 'unicode')

        post_url = route_path('admin_settings_issuetracker_update')
        post_data = {
            'new_pattern_pattern_0': pattern,
            'new_pattern_url_0': 'https://url',
            'new_pattern_prefix_0': 'prefix',
            'new_pattern_description_0': 'description',
            'uid': old_uid,
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        settings = SettingsModel().get_all_settings()
        assert settings[self.PATTERN_KEY+self.new_uid] == pattern
        assert self.PATTERN_KEY+old_uid not in settings

        @request.addfinalizer
        def cleanup():
            IssueTrackerSettingsModel().delete_entries(self.new_uid)

    def test_replace_issuetracker_pattern_description(
            self, autologin_user, csrf_token, request, settings_util):
        prefix = 'issuetracker'
        pattern = 'issuetracker_pat'
        self.uid = md5(pattern)
        pattern_key = '_'.join([prefix, 'pat', self.uid])
        rc_pattern_key = '_'.join(['rhodecode', pattern_key])
        desc_key = '_'.join([prefix, 'desc', self.uid])
        rc_desc_key = '_'.join(['rhodecode', desc_key])
        new_description = 'new_description'

        settings_util.create_rhodecode_setting(
            pattern_key, pattern, 'unicode', cleanup=False)
        settings_util.create_rhodecode_setting(
            desc_key, 'old description', 'unicode', cleanup=False)

        post_url = route_path('admin_settings_issuetracker_update')
        post_data = {
            'new_pattern_pattern_0': pattern,
            'new_pattern_url_0': 'https://url',
            'new_pattern_prefix_0': 'prefix',
            'new_pattern_description_0': new_description,
            'uid': self.uid,
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        settings = SettingsModel().get_all_settings()
        assert settings[rc_pattern_key] == pattern
        assert settings[rc_desc_key] == new_description

        @request.addfinalizer
        def cleanup():
            IssueTrackerSettingsModel().delete_entries(self.uid)

    def test_delete_issuetracker_pattern(
            self, autologin_user, backend, csrf_token, settings_util):
        pattern = 'issuetracker_pat'
        uid = md5(pattern)
        settings_util.create_rhodecode_setting(
            self.SHORT_PATTERN_KEY+uid, pattern, 'unicode', cleanup=False)

        post_url = route_path('admin_settings_issuetracker_delete')
        post_data = {
            '_method': 'delete',
            'uid': uid,
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        settings = SettingsModel().get_all_settings()
        assert 'rhodecode_%s%s' % (self.SHORT_PATTERN_KEY, uid) not in settings
@@ -1,391 +1,391 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import mock
import pytest

from rhodecode.lib import helpers as h
from rhodecode.model.db import User, Gist
from rhodecode.model.gist import GistModel
from rhodecode.model.meta import Session
from rhodecode.tests import (
    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
    TestController, assert_session_flash)


def route_path(name, params=None, **kwargs):
    import urllib
    from rhodecode.apps._base import ADMIN_PREFIX

    base_url = {
        'gists_show': ADMIN_PREFIX + '/gists',
        'gists_new': ADMIN_PREFIX + '/gists/new',
        'gists_create': ADMIN_PREFIX + '/gists/create',
        'gist_show': ADMIN_PREFIX + '/gists/{gist_id}',
        'gist_delete': ADMIN_PREFIX + '/gists/{gist_id}/delete',
        'gist_edit': ADMIN_PREFIX + '/gists/{gist_id}/edit',
        'gist_edit_check_revision': ADMIN_PREFIX + '/gists/{gist_id}/edit/check_revision',
45 'gist_update': ADMIN_PREFIX + '/gists/{gist_id}/update',
45 'gist_update': ADMIN_PREFIX + '/gists/{gist_id}/update',
46 'gist_show_rev': ADMIN_PREFIX + '/gists/{gist_id}/{revision}',
46 'gist_show_rev': ADMIN_PREFIX + '/gists/{gist_id}/{revision}',
47 'gist_show_formatted': ADMIN_PREFIX + '/gists/{gist_id}/{revision}/{format}',
47 'gist_show_formatted': ADMIN_PREFIX + '/gists/{gist_id}/{revision}/{format}',
48 'gist_show_formatted_path': ADMIN_PREFIX + '/gists/{gist_id}/{revision}/{format}/{f_path}',
48 'gist_show_formatted_path': ADMIN_PREFIX + '/gists/{gist_id}/{revision}/{format}/{f_path}',
49
49
50 }[name].format(**kwargs)
50 }[name].format(**kwargs)
51
51
52 if params:
52 if params:
53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
53 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
54 return base_url
54 return base_url
55
55
56
56
57 class GistUtility(object):
57 class GistUtility(object):
58
58
59 def __init__(self):
59 def __init__(self):
60 self._gist_ids = []
60 self._gist_ids = []
61
61
62 def __call__(
62 def __call__(
63 self, f_name, content='some gist', lifetime=-1,
63 self, f_name, content='some gist', lifetime=-1,
64 description='gist-desc', gist_type='public',
64 description='gist-desc', gist_type='public',
65 acl_level=Gist.GIST_PUBLIC, owner=TEST_USER_ADMIN_LOGIN):
65 acl_level=Gist.GIST_PUBLIC, owner=TEST_USER_ADMIN_LOGIN):
66 gist_mapping = {
66 gist_mapping = {
67 f_name: {'content': content}
67 f_name: {'content': content}
68 }
68 }
69 user = User.get_by_username(owner)
69 user = User.get_by_username(owner)
70 gist = GistModel().create(
70 gist = GistModel().create(
71 description, owner=user, gist_mapping=gist_mapping,
71 description, owner=user, gist_mapping=gist_mapping,
72 gist_type=gist_type, lifetime=lifetime, gist_acl_level=acl_level)
72 gist_type=gist_type, lifetime=lifetime, gist_acl_level=acl_level)
73 Session().commit()
73 Session().commit()
74 self._gist_ids.append(gist.gist_id)
74 self._gist_ids.append(gist.gist_id)
75 return gist
75 return gist
76
76
77 def cleanup(self):
77 def cleanup(self):
78 for gist_id in self._gist_ids:
78 for gist_id in self._gist_ids:
79 gist = Gist.get(gist_id)
79 gist = Gist.get(gist_id)
80 if gist:
80 if gist:
81 Session().delete(gist)
81 Session().delete(gist)
82
82
83 Session().commit()
83 Session().commit()
84
84
85
85
86 @pytest.fixture
86 @pytest.fixture()
87 def create_gist(request):
87 def create_gist(request):
88 gist_utility = GistUtility()
88 gist_utility = GistUtility()
89 request.addfinalizer(gist_utility.cleanup)
89 request.addfinalizer(gist_utility.cleanup)
90 return gist_utility
90 return gist_utility
91
91
92
92
93 class TestGistsController(TestController):
93 class TestGistsController(TestController):
94
94
95 def test_index_empty(self, create_gist):
95 def test_index_empty(self, create_gist):
96 self.log_user()
96 self.log_user()
97 response = self.app.get(route_path('gists_show'))
97 response = self.app.get(route_path('gists_show'))
98 response.mustcontain('data: [],')
98 response.mustcontain('data: [],')
99
99
100 def test_index(self, create_gist):
100 def test_index(self, create_gist):
101 self.log_user()
101 self.log_user()
102 g1 = create_gist('gist1')
102 g1 = create_gist('gist1')
103 g2 = create_gist('gist2', lifetime=1400)
103 g2 = create_gist('gist2', lifetime=1400)
104 g3 = create_gist('gist3', description='gist3-desc')
104 g3 = create_gist('gist3', description='gist3-desc')
105 g4 = create_gist('gist4', gist_type='private').gist_access_id
105 g4 = create_gist('gist4', gist_type='private').gist_access_id
106 response = self.app.get(route_path('gists_show'))
106 response = self.app.get(route_path('gists_show'))
107
107
108 response.mustcontain('gist: %s' % g1.gist_access_id)
108 response.mustcontain('gist: %s' % g1.gist_access_id)
109 response.mustcontain('gist: %s' % g2.gist_access_id)
109 response.mustcontain('gist: %s' % g2.gist_access_id)
110 response.mustcontain('gist: %s' % g3.gist_access_id)
110 response.mustcontain('gist: %s' % g3.gist_access_id)
111 response.mustcontain('gist3-desc')
111 response.mustcontain('gist3-desc')
112 response.mustcontain(no=['gist: %s' % g4])
112 response.mustcontain(no=['gist: %s' % g4])
113
113
114 # Expiration information should be visible
114 # Expiration information should be visible
115 expires_tag = '%s' % h.age_component(
115 expires_tag = '%s' % h.age_component(
116 h.time_to_utcdatetime(g2.gist_expires))
116 h.time_to_utcdatetime(g2.gist_expires))
117 response.mustcontain(expires_tag.replace('"', '\\"'))
117 response.mustcontain(expires_tag.replace('"', '\\"'))
118
118
119 def test_index_private_gists(self, create_gist):
119 def test_index_private_gists(self, create_gist):
120 self.log_user()
120 self.log_user()
121 gist = create_gist('gist5', gist_type='private')
121 gist = create_gist('gist5', gist_type='private')
122 response = self.app.get(route_path('gists_show', params=dict(private=1)))
122 response = self.app.get(route_path('gists_show', params=dict(private=1)))
123
123
124 # and privates
124 # and privates
125 response.mustcontain('gist: %s' % gist.gist_access_id)
125 response.mustcontain('gist: %s' % gist.gist_access_id)
126
126
127 def test_index_show_all(self, create_gist):
127 def test_index_show_all(self, create_gist):
128 self.log_user()
128 self.log_user()
129 create_gist('gist1')
129 create_gist('gist1')
130 create_gist('gist2', lifetime=1400)
130 create_gist('gist2', lifetime=1400)
131 create_gist('gist3', description='gist3-desc')
131 create_gist('gist3', description='gist3-desc')
132 create_gist('gist4', gist_type='private')
132 create_gist('gist4', gist_type='private')
133
133
134 response = self.app.get(route_path('gists_show', params=dict(all=1)))
134 response = self.app.get(route_path('gists_show', params=dict(all=1)))
135
135
136 assert len(GistModel.get_all()) == 4
136 assert len(GistModel.get_all()) == 4
137 # and privates
137 # and privates
138 for gist in GistModel.get_all():
138 for gist in GistModel.get_all():
139 response.mustcontain('gist: %s' % gist.gist_access_id)
139 response.mustcontain('gist: %s' % gist.gist_access_id)
140
140
141 def test_index_show_all_hidden_from_regular(self, create_gist):
141 def test_index_show_all_hidden_from_regular(self, create_gist):
142 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
142 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
143 create_gist('gist2', gist_type='private')
143 create_gist('gist2', gist_type='private')
144 create_gist('gist3', gist_type='private')
144 create_gist('gist3', gist_type='private')
145 create_gist('gist4', gist_type='private')
145 create_gist('gist4', gist_type='private')
146
146
147 response = self.app.get(route_path('gists_show', params=dict(all=1)))
147 response = self.app.get(route_path('gists_show', params=dict(all=1)))
148
148
149 assert len(GistModel.get_all()) == 3
149 assert len(GistModel.get_all()) == 3
150 # since we don't have access to private in this view, we
150 # since we don't have access to private in this view, we
151 # should see nothing
151 # should see nothing
152 for gist in GistModel.get_all():
152 for gist in GistModel.get_all():
153 response.mustcontain(no=['gist: %s' % gist.gist_access_id])
153 response.mustcontain(no=['gist: %s' % gist.gist_access_id])
154
154
155 def test_create(self):
155 def test_create(self):
156 self.log_user()
156 self.log_user()
157 response = self.app.post(
157 response = self.app.post(
158 route_path('gists_create'),
158 route_path('gists_create'),
159 params={'lifetime': -1,
159 params={'lifetime': -1,
160 'content': 'gist test',
160 'content': 'gist test',
161 'filename': 'foo',
161 'filename': 'foo',
162 'public': 'public',
162 'public': 'public',
163 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
163 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
164 'csrf_token': self.csrf_token},
164 'csrf_token': self.csrf_token},
165 status=302)
165 status=302)
166 response = response.follow()
166 response = response.follow()
167 response.mustcontain('added file: foo')
167 response.mustcontain('added file: foo')
168 response.mustcontain('gist test')
168 response.mustcontain('gist test')
169
169
170 def test_create_with_path_with_dirs(self):
170 def test_create_with_path_with_dirs(self):
171 self.log_user()
171 self.log_user()
172 response = self.app.post(
172 response = self.app.post(
173 route_path('gists_create'),
173 route_path('gists_create'),
174 params={'lifetime': -1,
174 params={'lifetime': -1,
175 'content': 'gist test',
175 'content': 'gist test',
176 'filename': '/home/foo',
176 'filename': '/home/foo',
177 'public': 'public',
177 'public': 'public',
178 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
178 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
179 'csrf_token': self.csrf_token},
179 'csrf_token': self.csrf_token},
180 status=200)
180 status=200)
181 response.mustcontain('Filename /home/foo cannot be inside a directory')
181 response.mustcontain('Filename /home/foo cannot be inside a directory')
182
182
183 def test_access_expired_gist(self, create_gist):
183 def test_access_expired_gist(self, create_gist):
184 self.log_user()
184 self.log_user()
185 gist = create_gist('never-see-me')
185 gist = create_gist('never-see-me')
186 gist.gist_expires = 0 # 1970
186 gist.gist_expires = 0 # 1970
187 Session().add(gist)
187 Session().add(gist)
188 Session().commit()
188 Session().commit()
189
189
190 self.app.get(route_path('gist_show', gist_id=gist.gist_access_id),
190 self.app.get(route_path('gist_show', gist_id=gist.gist_access_id),
191 status=404)
191 status=404)
192
192
193 def test_create_private(self):
193 def test_create_private(self):
194 self.log_user()
194 self.log_user()
195 response = self.app.post(
195 response = self.app.post(
196 route_path('gists_create'),
196 route_path('gists_create'),
197 params={'lifetime': -1,
197 params={'lifetime': -1,
198 'content': 'private gist test',
198 'content': 'private gist test',
199 'filename': 'private-foo',
199 'filename': 'private-foo',
200 'private': 'private',
200 'private': 'private',
201 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
201 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
202 'csrf_token': self.csrf_token},
202 'csrf_token': self.csrf_token},
203 status=302)
203 status=302)
204 response = response.follow()
204 response = response.follow()
205 response.mustcontain('added file: private-foo<')
205 response.mustcontain('added file: private-foo<')
206 response.mustcontain('private gist test')
206 response.mustcontain('private gist test')
207 response.mustcontain('Private Gist')
207 response.mustcontain('Private Gist')
208 # Make sure private gists are not indexed by robots
208 # Make sure private gists are not indexed by robots
209 response.mustcontain(
209 response.mustcontain(
210 '<meta name="robots" content="noindex, nofollow">')
210 '<meta name="robots" content="noindex, nofollow">')
211
211
212 def test_create_private_acl_private(self):
212 def test_create_private_acl_private(self):
213 self.log_user()
213 self.log_user()
214 response = self.app.post(
214 response = self.app.post(
215 route_path('gists_create'),
215 route_path('gists_create'),
216 params={'lifetime': -1,
216 params={'lifetime': -1,
217 'content': 'private gist test',
217 'content': 'private gist test',
218 'filename': 'private-foo',
218 'filename': 'private-foo',
219 'private': 'private',
219 'private': 'private',
220 'gist_acl_level': Gist.ACL_LEVEL_PRIVATE,
220 'gist_acl_level': Gist.ACL_LEVEL_PRIVATE,
221 'csrf_token': self.csrf_token},
221 'csrf_token': self.csrf_token},
222 status=302)
222 status=302)
223 response = response.follow()
223 response = response.follow()
224 response.mustcontain('added file: private-foo<')
224 response.mustcontain('added file: private-foo<')
225 response.mustcontain('private gist test')
225 response.mustcontain('private gist test')
226 response.mustcontain('Private Gist')
226 response.mustcontain('Private Gist')
227 # Make sure private gists are not indexed by robots
227 # Make sure private gists are not indexed by robots
228 response.mustcontain(
228 response.mustcontain(
229 '<meta name="robots" content="noindex, nofollow">')
229 '<meta name="robots" content="noindex, nofollow">')
230
230
231 def test_create_with_description(self):
231 def test_create_with_description(self):
232 self.log_user()
232 self.log_user()
233 response = self.app.post(
233 response = self.app.post(
234 route_path('gists_create'),
234 route_path('gists_create'),
235 params={'lifetime': -1,
235 params={'lifetime': -1,
236 'content': 'gist test',
236 'content': 'gist test',
237 'filename': 'foo-desc',
237 'filename': 'foo-desc',
238 'description': 'gist-desc',
238 'description': 'gist-desc',
239 'public': 'public',
239 'public': 'public',
240 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
240 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
241 'csrf_token': self.csrf_token},
241 'csrf_token': self.csrf_token},
242 status=302)
242 status=302)
243 response = response.follow()
243 response = response.follow()
244 response.mustcontain('added file: foo-desc')
244 response.mustcontain('added file: foo-desc')
245 response.mustcontain('gist test')
245 response.mustcontain('gist test')
246 response.mustcontain('gist-desc')
246 response.mustcontain('gist-desc')
247
247
248 def test_create_public_with_anonymous_access(self):
248 def test_create_public_with_anonymous_access(self):
249 self.log_user()
249 self.log_user()
250 params = {
250 params = {
251 'lifetime': -1,
251 'lifetime': -1,
252 'content': 'gist test',
252 'content': 'gist test',
253 'filename': 'foo-desc',
253 'filename': 'foo-desc',
254 'description': 'gist-desc',
254 'description': 'gist-desc',
255 'public': 'public',
255 'public': 'public',
256 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
256 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
257 'csrf_token': self.csrf_token
257 'csrf_token': self.csrf_token
258 }
258 }
259 response = self.app.post(
259 response = self.app.post(
260 route_path('gists_create'), params=params, status=302)
260 route_path('gists_create'), params=params, status=302)
261 self.logout_user()
261 self.logout_user()
262 response = response.follow()
262 response = response.follow()
263 response.mustcontain('added file: foo-desc')
263 response.mustcontain('added file: foo-desc')
264 response.mustcontain('gist test')
264 response.mustcontain('gist test')
265 response.mustcontain('gist-desc')
265 response.mustcontain('gist-desc')
266
266
267 def test_new(self):
267 def test_new(self):
268 self.log_user()
268 self.log_user()
269 self.app.get(route_path('gists_new'))
269 self.app.get(route_path('gists_new'))
270
270
271 def test_delete(self, create_gist):
271 def test_delete(self, create_gist):
272 self.log_user()
272 self.log_user()
273 gist = create_gist('delete-me')
273 gist = create_gist('delete-me')
274 response = self.app.post(
274 response = self.app.post(
275 route_path('gist_delete', gist_id=gist.gist_id),
275 route_path('gist_delete', gist_id=gist.gist_id),
276 params={'csrf_token': self.csrf_token})
276 params={'csrf_token': self.csrf_token})
277 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
277 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
278
278
279 def test_delete_normal_user_his_gist(self, create_gist):
279 def test_delete_normal_user_his_gist(self, create_gist):
280 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
280 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
281 gist = create_gist('delete-me', owner=TEST_USER_REGULAR_LOGIN)
281 gist = create_gist('delete-me', owner=TEST_USER_REGULAR_LOGIN)
282
282
283 response = self.app.post(
283 response = self.app.post(
284 route_path('gist_delete', gist_id=gist.gist_id),
284 route_path('gist_delete', gist_id=gist.gist_id),
285 params={'csrf_token': self.csrf_token})
285 params={'csrf_token': self.csrf_token})
286 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
286 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
287
287
288 def test_delete_normal_user_not_his_own_gist(self, create_gist):
288 def test_delete_normal_user_not_his_own_gist(self, create_gist):
289 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
289 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
290 gist = create_gist('delete-me-2')
290 gist = create_gist('delete-me-2')
291
291
292 self.app.post(
292 self.app.post(
293 route_path('gist_delete', gist_id=gist.gist_id),
293 route_path('gist_delete', gist_id=gist.gist_id),
294 params={'csrf_token': self.csrf_token}, status=404)
294 params={'csrf_token': self.csrf_token}, status=404)
295
295
296 def test_show(self, create_gist):
296 def test_show(self, create_gist):
297 gist = create_gist('gist-show-me')
297 gist = create_gist('gist-show-me')
298 response = self.app.get(route_path('gist_show', gist_id=gist.gist_access_id))
298 response = self.app.get(route_path('gist_show', gist_id=gist.gist_access_id))
299
299
300 response.mustcontain('added file: gist-show-me<')
300 response.mustcontain('added file: gist-show-me<')
301
301
302 assert_response = response.assert_response()
302 assert_response = response.assert_response()
303 assert_response.element_equals_to(
303 assert_response.element_equals_to(
304 'div.rc-user span.user',
304 'div.rc-user span.user',
305 '<a href="/_profiles/test_admin">test_admin</a></span>')
305 '<a href="/_profiles/test_admin">test_admin</a></span>')
306
306
307 response.mustcontain('gist-desc')
307 response.mustcontain('gist-desc')
308
308
309 def test_show_without_hg(self, create_gist):
309 def test_show_without_hg(self, create_gist):
310 with mock.patch(
310 with mock.patch(
311 'rhodecode.lib.vcs.settings.ALIASES', ['git']):
311 'rhodecode.lib.vcs.settings.ALIASES', ['git']):
312 gist = create_gist('gist-show-me-again')
312 gist = create_gist('gist-show-me-again')
313 self.app.get(
313 self.app.get(
314 route_path('gist_show', gist_id=gist.gist_access_id), status=200)
314 route_path('gist_show', gist_id=gist.gist_access_id), status=200)
315
315
316 def test_show_acl_private(self, create_gist):
316 def test_show_acl_private(self, create_gist):
317 gist = create_gist('gist-show-me-only-when-im-logged-in',
317 gist = create_gist('gist-show-me-only-when-im-logged-in',
318 acl_level=Gist.ACL_LEVEL_PRIVATE)
318 acl_level=Gist.ACL_LEVEL_PRIVATE)
319 self.app.get(
319 self.app.get(
320 route_path('gist_show', gist_id=gist.gist_access_id), status=404)
320 route_path('gist_show', gist_id=gist.gist_access_id), status=404)
321
321
322 # now when we log in, we should see this gist
322 # now when we log in, we should see this gist
323 self.log_user()
323 self.log_user()
324 response = self.app.get(
324 response = self.app.get(
325 route_path('gist_show', gist_id=gist.gist_access_id))
325 route_path('gist_show', gist_id=gist.gist_access_id))
326 response.mustcontain('added file: gist-show-me-only-when-im-logged-in')
326 response.mustcontain('added file: gist-show-me-only-when-im-logged-in')
327
327
328 assert_response = response.assert_response()
328 assert_response = response.assert_response()
329 assert_response.element_equals_to(
329 assert_response.element_equals_to(
330 'div.rc-user span.user',
330 'div.rc-user span.user',
331 '<a href="/_profiles/test_admin">test_admin</a></span>')
331 '<a href="/_profiles/test_admin">test_admin</a></span>')
332 response.mustcontain('gist-desc')
332 response.mustcontain('gist-desc')
333
333
334 def test_show_as_raw(self, create_gist):
334 def test_show_as_raw(self, create_gist):
335 gist = create_gist('gist-show-me', content='GIST CONTENT')
335 gist = create_gist('gist-show-me', content='GIST CONTENT')
336 response = self.app.get(
336 response = self.app.get(
337 route_path('gist_show_formatted',
337 route_path('gist_show_formatted',
338 gist_id=gist.gist_access_id, revision='tip',
338 gist_id=gist.gist_access_id, revision='tip',
339 format='raw'))
339 format='raw'))
340 assert response.body == 'GIST CONTENT'
340 assert response.body == 'GIST CONTENT'
341
341
342 def test_show_as_raw_individual_file(self, create_gist):
342 def test_show_as_raw_individual_file(self, create_gist):
343 gist = create_gist('gist-show-me-raw', content='GIST BODY')
343 gist = create_gist('gist-show-me-raw', content='GIST BODY')
344 response = self.app.get(
344 response = self.app.get(
345 route_path('gist_show_formatted_path',
345 route_path('gist_show_formatted_path',
346 gist_id=gist.gist_access_id, format='raw',
346 gist_id=gist.gist_access_id, format='raw',
347 revision='tip', f_path='gist-show-me-raw'))
347 revision='tip', f_path='gist-show-me-raw'))
348 assert response.body == 'GIST BODY'
348 assert response.body == 'GIST BODY'
349
349
350 def test_edit_page(self, create_gist):
350 def test_edit_page(self, create_gist):
351 self.log_user()
351 self.log_user()
352 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
352 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
353 response = self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id))
353 response = self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id))
354 response.mustcontain('GIST EDIT BODY')
354 response.mustcontain('GIST EDIT BODY')
355
355
356 def test_edit_page_non_logged_user(self, create_gist):
356 def test_edit_page_non_logged_user(self, create_gist):
357 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
357 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
358 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
358 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
359 status=302)
359 status=302)
360
360
361 def test_edit_normal_user_his_gist(self, create_gist):
361 def test_edit_normal_user_his_gist(self, create_gist):
362 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
362 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
363 gist = create_gist('gist-for-edit', owner=TEST_USER_REGULAR_LOGIN)
363 gist = create_gist('gist-for-edit', owner=TEST_USER_REGULAR_LOGIN)
364 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
364 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
365 status=200)
365 status=200)
366
366
367 def test_edit_normal_user_not_his_own_gist(self, create_gist):
367 def test_edit_normal_user_not_his_own_gist(self, create_gist):
368 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
368 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
369 gist = create_gist('delete-me')
369 gist = create_gist('delete-me')
370 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
370 self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id),
371 status=404)
371 status=404)
372
372
373 def test_user_first_name_is_escaped(self, user_util, create_gist):
373 def test_user_first_name_is_escaped(self, user_util, create_gist):
374 xss_atack_string = '"><script>alert(\'First Name\')</script>'
374 xss_atack_string = '"><script>alert(\'First Name\')</script>'
375 xss_escaped_string = h.html_escape(h.escape(xss_atack_string))
375 xss_escaped_string = h.html_escape(h.escape(xss_atack_string))
376 password = 'test'
376 password = 'test'
377 user = user_util.create_user(
377 user = user_util.create_user(
378 firstname=xss_atack_string, password=password)
378 firstname=xss_atack_string, password=password)
379 create_gist('gist', gist_type='public', owner=user.username)
379 create_gist('gist', gist_type='public', owner=user.username)
380 response = self.app.get(route_path('gists_show'))
380 response = self.app.get(route_path('gists_show'))
381 response.mustcontain(xss_escaped_string)
381 response.mustcontain(xss_escaped_string)
382
382
383 def test_user_last_name_is_escaped(self, user_util, create_gist):
383 def test_user_last_name_is_escaped(self, user_util, create_gist):
384 xss_atack_string = '"><script>alert(\'Last Name\')</script>'
384 xss_atack_string = '"><script>alert(\'Last Name\')</script>'
385 xss_escaped_string = h.html_escape(h.escape(xss_atack_string))
385 xss_escaped_string = h.html_escape(h.escape(xss_atack_string))
386 password = 'test'
386 password = 'test'
387 user = user_util.create_user(
387 user = user_util.create_user(
388 lastname=xss_atack_string, password=password)
388 lastname=xss_atack_string, password=password)
389 create_gist('gist', gist_type='public', owner=user.username)
389 create_gist('gist', gist_type='public', owner=user.username)
390 response = self.app.get(route_path('gists_show'))
390 response = self.app.get(route_path('gists_show'))
391 response.mustcontain(xss_escaped_string)
391 response.mustcontain(xss_escaped_string)
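For context on the create_gist fixture at the top of the file above: it is declared with the called decorator form, @pytest.fixture(). In pytest this registers exactly the same fixture as the bare @pytest.fixture, and the called form is also what is required once arguments such as scope are passed. A minimal, self-contained sketch with illustrative fixture names:

import pytest

@pytest.fixture()                    # called form, equivalent to a bare @pytest.fixture
def sample_value():
    return 42

@pytest.fixture(scope='module')      # arguments always require the called form
def shared_data():
    return {'answer': 42}

def test_sample(sample_value, shared_data):
    assert sample_value == shared_data['answer']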
@@ -1,524 +1,524 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.compat import OrderedDict
28 from rhodecode.lib.compat import OrderedDict
29 from rhodecode.lib.utils2 import AttributeDict, safe_str
29 from rhodecode.lib.utils2 import AttributeDict, safe_str
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 from rhodecode.model.db import Repository
31 from rhodecode.model.db import Repository
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
35 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests import assert_session_flash
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
38
38
39
39
40 fixture = Fixture()
40 fixture = Fixture()
41
41
42
42
43 def route_path(name, params=None, **kwargs):
43 def route_path(name, params=None, **kwargs):
44 import urllib
44 import urllib
45
45
46 base_url = {
46 base_url = {
47 'repo_summary': '/{repo_name}',
47 'repo_summary': '/{repo_name}',
48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
49 'repo_refs_data': '/{repo_name}/refs-data',
49 'repo_refs_data': '/{repo_name}/refs-data',
50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
51 'repo_creating_check': '/{repo_name}/repo_creating_check',
51 'repo_creating_check': '/{repo_name}/repo_creating_check',
52 }[name].format(**kwargs)
52 }[name].format(**kwargs)
53
53
54 if params:
54 if params:
55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
56 return base_url
56 return base_url
57
57
58
58
59 def assert_clone_url(response, server, repo, disabled=False):
59 def assert_clone_url(response, server, repo, disabled=False):
60
60
61 response.mustcontain(
61 response.mustcontain(
62 '<input type="text" class="input-monospace clone_url_input" '
62 '<input type="text" class="input-monospace clone_url_input" '
63 '{disabled}readonly="readonly" '
63 '{disabled}readonly="readonly" '
64 'value="http://test_admin@{server}/{repo}"/>'.format(
64 'value="http://test_admin@{server}/{repo}"/>'.format(
65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
66 )
66 )
67
67
68
68
69 @pytest.mark.usefixtures('app')
69 @pytest.mark.usefixtures('app')
70 class TestSummaryView(object):
70 class TestSummaryView(object):
71 def test_index(self, autologin_user, backend, http_host_only_stub):
71 def test_index(self, autologin_user, backend, http_host_only_stub):
72 repo_id = backend.repo.repo_id
72 repo_id = backend.repo.repo_id
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
75 return_value=False):
75 return_value=False):
76 response = self.app.get(
76 response = self.app.get(
77 route_path('repo_summary', repo_name=repo_name))
77 route_path('repo_summary', repo_name=repo_name))
78
78
79 # repo type
79 # repo type
80 response.mustcontain(
80 response.mustcontain(
81 '<i class="icon-%s">' % (backend.alias, )
81 '<i class="icon-%s">' % (backend.alias, )
82 )
82 )
83 # public/private
83 # public/private
84 response.mustcontain(
84 response.mustcontain(
85 """<i class="icon-unlock-alt">"""
85 """<i class="icon-unlock-alt">"""
86 )
86 )
87
87
88 # clone url...
88 # clone url...
89 assert_clone_url(response, http_host_only_stub, repo_name)
89 assert_clone_url(response, http_host_only_stub, repo_name)
90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
91
91
92 def test_index_svn_without_proxy(
92 def test_index_svn_without_proxy(
93 self, autologin_user, backend_svn, http_host_only_stub):
93 self, autologin_user, backend_svn, http_host_only_stub):
94 repo_id = backend_svn.repo.repo_id
94 repo_id = backend_svn.repo.repo_id
95 repo_name = backend_svn.repo_name
95 repo_name = backend_svn.repo_name
96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
97 # clone url...
97 # clone url...
98
98
99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
101
101
102 def test_index_with_trailing_slash(
102 def test_index_with_trailing_slash(
103 self, autologin_user, backend, http_host_only_stub):
103 self, autologin_user, backend, http_host_only_stub):
104
104
105 repo_id = backend.repo.repo_id
105 repo_id = backend.repo.repo_id
106 repo_name = backend.repo_name
106 repo_name = backend.repo_name
107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
108 return_value=False):
108 return_value=False):
109 response = self.app.get(
109 response = self.app.get(
110 route_path('repo_summary', repo_name=repo_name) + '/',
110 route_path('repo_summary', repo_name=repo_name) + '/',
111 status=200)
111 status=200)
112
112
113 # clone url...
113 # clone url...
114 assert_clone_url(response, http_host_only_stub, repo_name)
114 assert_clone_url(response, http_host_only_stub, repo_name)
115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
116
116
117 def test_index_by_id(self, autologin_user, backend):
117 def test_index_by_id(self, autologin_user, backend):
118 repo_id = backend.repo.repo_id
118 repo_id = backend.repo.repo_id
119 response = self.app.get(
119 response = self.app.get(
120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
121
121
122 # repo type
122 # repo type
123 response.mustcontain(
123 response.mustcontain(
124 '<i class="icon-%s">' % (backend.alias, )
124 '<i class="icon-%s">' % (backend.alias, )
125 )
125 )
126 # public/private
126 # public/private
127 response.mustcontain(
127 response.mustcontain(
128 """<i class="icon-unlock-alt">"""
128 """<i class="icon-unlock-alt">"""
129 )
129 )
130
130
131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
132 fixture.create_repo(name='repo_1')
132 fixture.create_repo(name='repo_1')
133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
134
134
135 try:
135 try:
136 response.mustcontain("repo_1")
136 response.mustcontain("repo_1")
137 finally:
137 finally:
138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
139 Session().commit()
139 Session().commit()
140
140
141 def test_index_with_anonymous_access_disabled(
141 def test_index_with_anonymous_access_disabled(
142 self, backend, disable_anonymous_user):
142 self, backend, disable_anonymous_user):
143 response = self.app.get(
143 response = self.app.get(
144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
145 assert 'login' in response.location
145 assert 'login' in response.location
146
146
147 def _enable_stats(self, repo):
147 def _enable_stats(self, repo):
148 r = Repository.get_by_repo_name(repo)
148 r = Repository.get_by_repo_name(repo)
149 r.enable_statistics = True
149 r.enable_statistics = True
150 Session().add(r)
150 Session().add(r)
151 Session().commit()
151 Session().commit()
152
152
153 expected_trending = {
153 expected_trending = {
154 'hg': {
154 'hg': {
155 "py": {"count": 68, "desc": ["Python"]},
155 "py": {"count": 68, "desc": ["Python"]},
156 "rst": {"count": 16, "desc": ["Rst"]},
156 "rst": {"count": 16, "desc": ["Rst"]},
157 "css": {"count": 2, "desc": ["Css"]},
157 "css": {"count": 2, "desc": ["Css"]},
158 "sh": {"count": 2, "desc": ["Bash"]},
158 "sh": {"count": 2, "desc": ["Bash"]},
159 "bat": {"count": 1, "desc": ["Batch"]},
159 "bat": {"count": 1, "desc": ["Batch"]},
160 "cfg": {"count": 1, "desc": ["Ini"]},
160 "cfg": {"count": 1, "desc": ["Ini"]},
161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
162 "ini": {"count": 1, "desc": ["Ini"]},
162 "ini": {"count": 1, "desc": ["Ini"]},
163 "js": {"count": 1, "desc": ["Javascript"]},
163 "js": {"count": 1, "desc": ["Javascript"]},
164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
165 },
165 },
166 'git': {
166 'git': {
167 "py": {"count": 68, "desc": ["Python"]},
167 "py": {"count": 68, "desc": ["Python"]},
168 "rst": {"count": 16, "desc": ["Rst"]},
168 "rst": {"count": 16, "desc": ["Rst"]},
169 "css": {"count": 2, "desc": ["Css"]},
169 "css": {"count": 2, "desc": ["Css"]},
170 "sh": {"count": 2, "desc": ["Bash"]},
170 "sh": {"count": 2, "desc": ["Bash"]},
171 "bat": {"count": 1, "desc": ["Batch"]},
171 "bat": {"count": 1, "desc": ["Batch"]},
172 "cfg": {"count": 1, "desc": ["Ini"]},
172 "cfg": {"count": 1, "desc": ["Ini"]},
173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
174 "ini": {"count": 1, "desc": ["Ini"]},
174 "ini": {"count": 1, "desc": ["Ini"]},
175 "js": {"count": 1, "desc": ["Javascript"]},
175 "js": {"count": 1, "desc": ["Javascript"]},
176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
177 },
177 },
178 'svn': {
178 'svn': {
179 "py": {"count": 75, "desc": ["Python"]},
179 "py": {"count": 75, "desc": ["Python"]},
180 "rst": {"count": 16, "desc": ["Rst"]},
180 "rst": {"count": 16, "desc": ["Rst"]},
181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
182 "css": {"count": 2, "desc": ["Css"]},
182 "css": {"count": 2, "desc": ["Css"]},
183 "bat": {"count": 1, "desc": ["Batch"]},
183 "bat": {"count": 1, "desc": ["Batch"]},
184 "cfg": {"count": 1, "desc": ["Ini"]},
184 "cfg": {"count": 1, "desc": ["Ini"]},
185 "ini": {"count": 1, "desc": ["Ini"]},
185 "ini": {"count": 1, "desc": ["Ini"]},
186 "js": {"count": 1, "desc": ["Javascript"]},
186 "js": {"count": 1, "desc": ["Javascript"]},
187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
188 "sh": {"count": 1, "desc": ["Bash"]}
188 "sh": {"count": 1, "desc": ["Bash"]}
189 },
189 },
190 }
190 }
191
191
192 def test_repo_stats(self, autologin_user, backend, xhr_header):
192 def test_repo_stats(self, autologin_user, backend, xhr_header):
193 response = self.app.get(
193 response = self.app.get(
194 route_path(
194 route_path(
195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
196 extra_environ=xhr_header,
196 extra_environ=xhr_header,
197 status=200)
197 status=200)
198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
199
199
200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
201 repo_name = backend.repo_name
201 repo_name = backend.repo_name
202
202
203 # codes stats
203 # codes stats
204 self._enable_stats(repo_name)
204 self._enable_stats(repo_name)
205 ScmModel().mark_for_invalidation(repo_name)
205 ScmModel().mark_for_invalidation(repo_name)
206
206
207 response = self.app.get(
207 response = self.app.get(
208 route_path(
208 route_path(
209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
210 extra_environ=xhr_header,
210 extra_environ=xhr_header,
211 status=200)
211 status=200)
212
212
213 expected_data = self.expected_trending[backend.alias]
213 expected_data = self.expected_trending[backend.alias]
214 returned_stats = response.json['code_stats']
214 returned_stats = response.json['code_stats']
215 for k, v in expected_data.items():
215 for k, v in expected_data.items():
216 assert v == returned_stats[k]
216 assert v == returned_stats[k]
217
217
218 def test_repo_refs_data(self, backend):
218 def test_repo_refs_data(self, backend):
219 response = self.app.get(
219 response = self.app.get(
220 route_path('repo_refs_data', repo_name=backend.repo_name),
220 route_path('repo_refs_data', repo_name=backend.repo_name),
221 status=200)
221 status=200)
222
222
223 # Ensure that there is the correct amount of items in the result
223 # Ensure that there is the correct amount of items in the result
224 repo = backend.repo.scm_instance()
224 repo = backend.repo.scm_instance()
225 data = response.json['results']
225 data = response.json['results']
226 items = sum(len(section['children']) for section in data)
226 items = sum(len(section['children']) for section in data)
227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
228 assert items == repo_refs
228 assert items == repo_refs
229
229
230 def test_index_shows_missing_requirements_message(
230 def test_index_shows_missing_requirements_message(
231 self, backend, autologin_user):
231 self, backend, autologin_user):
232 repo_name = backend.repo_name
232 repo_name = backend.repo_name
233 scm_patcher = mock.patch.object(
233 scm_patcher = mock.patch.object(
234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
235
235
236 with scm_patcher:
236 with scm_patcher:
237 response = self.app.get(
237 response = self.app.get(
238 route_path('repo_summary', repo_name=repo_name))
238 route_path('repo_summary', repo_name=repo_name))
239 assert_response = AssertResponse(response)
239 assert_response = AssertResponse(response)
240 assert_response.element_contains(
240 assert_response.element_contains(
241 '.main .alert-warning strong', 'Missing requirements')
241 '.main .alert-warning strong', 'Missing requirements')
242 assert_response.element_contains(
242 assert_response.element_contains(
243 '.main .alert-warning',
243 '.main .alert-warning',
244 'Commits cannot be displayed, because this repository '
244 'Commits cannot be displayed, because this repository '
245 'uses one or more extensions, which was not enabled.')
245 'uses one or more extensions, which was not enabled.')
246
246
247 def test_missing_requirements_page_does_not_contains_switch_to(
247 def test_missing_requirements_page_does_not_contains_switch_to(
248 self, autologin_user, backend):
248 self, autologin_user, backend):
249 repo_name = backend.repo_name
249 repo_name = backend.repo_name
250 scm_patcher = mock.patch.object(
250 scm_patcher = mock.patch.object(
251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
252
252
253 with scm_patcher:
253 with scm_patcher:
254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
255 response.mustcontain(no='Switch To')
255 response.mustcontain(no='Switch To')
256
256
257
257
258 @pytest.mark.usefixtures('app')
258 @pytest.mark.usefixtures('app')
259 class TestRepoLocation(object):
259 class TestRepoLocation(object):
260
260
261 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
261 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
262 def test_missing_filesystem_repo(
262 def test_missing_filesystem_repo(
263 self, autologin_user, backend, suffix, csrf_token):
263 self, autologin_user, backend, suffix, csrf_token):
264 repo = backend.create_repo(name_suffix=suffix)
264 repo = backend.create_repo(name_suffix=suffix)
265 repo_name = repo.repo_name
265 repo_name = repo.repo_name
266
266
267 # delete from file system
267 # delete from file system
268 RepoModel()._delete_filesystem_repo(repo)
268 RepoModel()._delete_filesystem_repo(repo)
269
269
270 # test if the repo is still in the database
270 # test if the repo is still in the database
271 new_repo = RepoModel().get_by_repo_name(repo_name)
271 new_repo = RepoModel().get_by_repo_name(repo_name)
272 assert new_repo.repo_name == repo_name
272 assert new_repo.repo_name == repo_name
273
273
274 # check if repo is not in the filesystem
274 # check if repo is not in the filesystem
275 assert not repo_on_filesystem(repo_name)
275 assert not repo_on_filesystem(repo_name)
276
276
277 response = self.app.get(
277 response = self.app.get(
278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
279
279
280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
281 'Please check if it exist, or is not damaged.' % repo_name
281 'Please check if it exist, or is not damaged.' % repo_name
282 assert_session_flash(response, msg)
282 assert_session_flash(response, msg)
283
283
284 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
284 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
285 def test_missing_filesystem_repo_on_repo_check(
285 def test_missing_filesystem_repo_on_repo_check(
286 self, autologin_user, backend, suffix, csrf_token):
286 self, autologin_user, backend, suffix, csrf_token):
287 repo = backend.create_repo(name_suffix=suffix)
287 repo = backend.create_repo(name_suffix=suffix)
288 repo_name = repo.repo_name
288 repo_name = repo.repo_name
289
289
290 # delete from file system
290 # delete from file system
291 RepoModel()._delete_filesystem_repo(repo)
291 RepoModel()._delete_filesystem_repo(repo)
292
292
293 # test if the repo is still in the database
293 # test if the repo is still in the database
294 new_repo = RepoModel().get_by_repo_name(repo_name)
294 new_repo = RepoModel().get_by_repo_name(repo_name)
295 assert new_repo.repo_name == repo_name
295 assert new_repo.repo_name == repo_name
296
296
297 # check if repo is not in the filesystem
297 # check if repo is not in the filesystem
298 assert not repo_on_filesystem(repo_name)
298 assert not repo_on_filesystem(repo_name)
299
299
300 # flush the session
300 # flush the session
301 self.app.get(
301 self.app.get(
302 route_path('repo_summary', repo_name=safe_str(repo_name)),
302 route_path('repo_summary', repo_name=safe_str(repo_name)),
303 status=302)
303 status=302)
304
304
305 response = self.app.get(
305 response = self.app.get(
306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
307 status=200)
307 status=200)
308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
309 'Please check if it exist, or is not damaged.' % repo_name
309 'Please check if it exist, or is not damaged.' % repo_name
310 assert_session_flash(response, msg)
310 assert_session_flash(response, msg)
311
311
312
312
313 @pytest.fixture()
313 @pytest.fixture()
314 def summary_view(context_stub, request_stub, user_util):
314 def summary_view(context_stub, request_stub, user_util):
315 """
315 """
316 Bootstrap view to test the view functions
316 Bootstrap view to test the view functions
317 """
317 """
318 request_stub.matched_route = AttributeDict(name='test_view')
318 request_stub.matched_route = AttributeDict(name='test_view')
319
319
320 request_stub.user = user_util.create_user().AuthUser()
320 request_stub.user = user_util.create_user().AuthUser()
321 request_stub.db_repo = user_util.create_repo()
321 request_stub.db_repo = user_util.create_repo()
322
322
323 view = RepoSummaryView(context=context_stub, request=request_stub)
323 view = RepoSummaryView(context=context_stub, request=request_stub)
324 return view
324 return view
325
325
326
326
327 @pytest.mark.usefixtures('app')
327 @pytest.mark.usefixtures('app')
328 class TestCreateReferenceData(object):
328 class TestCreateReferenceData(object):
329
329
330 @pytest.fixture
330 @pytest.fixture()
331 def example_refs(self):
331 def example_refs(self):
332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
333 example_refs = [
333 example_refs = [
334 ('section_1', section_1_refs, 't1'),
334 ('section_1', section_1_refs, 't1'),
335 ('section_2', {'c': 'c_id'}, 't2'),
335 ('section_2', {'c': 'c_id'}, 't2'),
336 ]
336 ]
337 return example_refs
337 return example_refs
338
338
339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
340 repo = mock.Mock()
340 repo = mock.Mock()
341 repo.name = 'test-repo'
341 repo.name = 'test-repo'
342 repo.alias = 'git'
342 repo.alias = 'git'
343 full_repo_name = 'pytest-repo-group/' + repo.name
343 full_repo_name = 'pytest-repo-group/' + repo.name
344
344
345 result = summary_view._create_reference_data(
345 result = summary_view._create_reference_data(
346 repo, full_repo_name, example_refs)
346 repo, full_repo_name, example_refs)
347
347
348 expected_files_url = '/{}/files/'.format(full_repo_name)
348 expected_files_url = '/{}/files/'.format(full_repo_name)
349 expected_result = [
349 expected_result = [
350 {
350 {
351 'children': [
351 'children': [
352 {
352 {
353 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
353 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
354 'files_url': expected_files_url + 'a/?at=a',
354 'files_url': expected_files_url + 'a/?at=a',
355 },
355 },
356 {
356 {
357 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
357 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
358 'files_url': expected_files_url + 'b/?at=b',
358 'files_url': expected_files_url + 'b/?at=b',
359 }
359 }
360 ],
360 ],
361 'text': 'section_1'
361 'text': 'section_1'
362 },
362 },
363 {
363 {
364 'children': [
364 'children': [
365 {
365 {
366 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
366 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
367 'files_url': expected_files_url + 'c/?at=c',
367 'files_url': expected_files_url + 'c/?at=c',
368 }
368 }
369 ],
369 ],
370 'text': 'section_2'
370 'text': 'section_2'
371 }]
371 }]
372 assert result == expected_result
372 assert result == expected_result
373
373
374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
375 repo = mock.Mock()
375 repo = mock.Mock()
376 repo.name = 'test-repo'
376 repo.name = 'test-repo'
377 repo.alias = 'svn'
377 repo.alias = 'svn'
378 full_repo_name = 'pytest-repo-group/' + repo.name
378 full_repo_name = 'pytest-repo-group/' + repo.name
379
379
380 result = summary_view._create_reference_data(
380 result = summary_view._create_reference_data(
381 repo, full_repo_name, example_refs)
381 repo, full_repo_name, example_refs)
382
382
383 expected_files_url = '/{}/files/'.format(full_repo_name)
383 expected_files_url = '/{}/files/'.format(full_repo_name)
384 expected_result = [
384 expected_result = [
385 {
385 {
386 'children': [
386 'children': [
387 {
387 {
388 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
388 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
389 'text': 'a', 'type': 't1',
389 'text': 'a', 'type': 't1',
390 'files_url': expected_files_url + 'a_id/a?at=a',
390 'files_url': expected_files_url + 'a_id/a?at=a',
391 },
391 },
392 {
392 {
393 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
393 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
394 'text': 'b', 'type': 't1',
394 'text': 'b', 'type': 't1',
395 'files_url': expected_files_url + 'b_id/b?at=b',
395 'files_url': expected_files_url + 'b_id/b?at=b',
396 }
396 }
397 ],
397 ],
398 'text': 'section_1'
398 'text': 'section_1'
399 },
399 },
400 {
400 {
401 'children': [
401 'children': [
402 {
402 {
403 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
403 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
404 'text': 'c', 'type': 't2',
404 'text': 'c', 'type': 't2',
405 'files_url': expected_files_url + 'c_id/c?at=c',
405 'files_url': expected_files_url + 'c_id/c?at=c',
406 }
406 }
407 ],
407 ],
408 'text': 'section_2'
408 'text': 'section_2'
409 }
409 }
410 ]
410 ]
411 assert result == expected_result
411 assert result == expected_result
412
412
413
413
414 class TestCreateFilesUrl(object):
414 class TestCreateFilesUrl(object):
415
415
416 def test_creates_non_svn_url(self, app, summary_view):
416 def test_creates_non_svn_url(self, app, summary_view):
417 repo = mock.Mock()
417 repo = mock.Mock()
418 repo.name = 'abcde'
418 repo.name = 'abcde'
419 full_repo_name = 'test-repo-group/' + repo.name
419 full_repo_name = 'test-repo-group/' + repo.name
420 ref_name = 'branch1'
420 ref_name = 'branch1'
421 raw_id = 'deadbeef0123456789'
421 raw_id = 'deadbeef0123456789'
422 is_svn = False
422 is_svn = False
423
423
424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
425 result = summary_view._create_files_url(
425 result = summary_view._create_files_url(
426 repo, full_repo_name, ref_name, raw_id, is_svn)
426 repo, full_repo_name, ref_name, raw_id, is_svn)
427 url_mock.assert_called_once_with(
427 url_mock.assert_called_once_with(
428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
429 f_path='', _query=dict(at=ref_name))
429 f_path='', _query=dict(at=ref_name))
430 assert result == url_mock.return_value
430 assert result == url_mock.return_value
431
431
432 def test_creates_svn_url(self, app, summary_view):
432 def test_creates_svn_url(self, app, summary_view):
433 repo = mock.Mock()
433 repo = mock.Mock()
434 repo.name = 'abcde'
434 repo.name = 'abcde'
435 full_repo_name = 'test-repo-group/' + repo.name
435 full_repo_name = 'test-repo-group/' + repo.name
436 ref_name = 'branch1'
436 ref_name = 'branch1'
437 raw_id = 'deadbeef0123456789'
437 raw_id = 'deadbeef0123456789'
438 is_svn = True
438 is_svn = True
439
439
440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
441 result = summary_view._create_files_url(
441 result = summary_view._create_files_url(
442 repo, full_repo_name, ref_name, raw_id, is_svn)
442 repo, full_repo_name, ref_name, raw_id, is_svn)
443 url_mock.assert_called_once_with(
443 url_mock.assert_called_once_with(
444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
445 commit_id=raw_id, _query=dict(at=ref_name))
445 commit_id=raw_id, _query=dict(at=ref_name))
446 assert result == url_mock.return_value
446 assert result == url_mock.return_value
447
447
448 def test_name_has_slashes(self, app, summary_view):
448 def test_name_has_slashes(self, app, summary_view):
449 repo = mock.Mock()
449 repo = mock.Mock()
450 repo.name = 'abcde'
450 repo.name = 'abcde'
451 full_repo_name = 'test-repo-group/' + repo.name
451 full_repo_name = 'test-repo-group/' + repo.name
452 ref_name = 'branch1/branch2'
452 ref_name = 'branch1/branch2'
453 raw_id = 'deadbeef0123456789'
453 raw_id = 'deadbeef0123456789'
454 is_svn = False
454 is_svn = False
455
455
456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
457 result = summary_view._create_files_url(
457 result = summary_view._create_files_url(
458 repo, full_repo_name, ref_name, raw_id, is_svn)
458 repo, full_repo_name, ref_name, raw_id, is_svn)
459 url_mock.assert_called_once_with(
459 url_mock.assert_called_once_with(
460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
461 f_path='', _query=dict(at=ref_name))
461 f_path='', _query=dict(at=ref_name))
462 assert result == url_mock.return_value
462 assert result == url_mock.return_value
463
463
464
464
465 class TestReferenceItems(object):
465 class TestReferenceItems(object):
466 repo = mock.Mock()
466 repo = mock.Mock()
467 repo.name = 'pytest-repo'
467 repo.name = 'pytest-repo'
468 repo_full_name = 'pytest-repo-group/' + repo.name
468 repo_full_name = 'pytest-repo-group/' + repo.name
469 ref_type = 'branch'
469 ref_type = 'branch'
470 fake_url = '/abcde/'
470 fake_url = '/abcde/'
471
471
472 @staticmethod
472 @staticmethod
473 def _format_function(name, id_):
473 def _format_function(name, id_):
474 return 'format_function_{}_{}'.format(name, id_)
474 return 'format_function_{}_{}'.format(name, id_)
475
475
476 def test_creates_required_amount_of_items(self, summary_view):
476 def test_creates_required_amount_of_items(self, summary_view):
477 amount = 100
477 amount = 100
478 refs = {
478 refs = {
479 'ref{}'.format(i): '{0:040d}'.format(i)
479 'ref{}'.format(i): '{0:040d}'.format(i)
480 for i in range(amount)
480 for i in range(amount)
481 }
481 }
482
482
483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
485 return_value=False)
485 return_value=False)
486
486
487 with url_patcher as url_mock, svn_patcher:
487 with url_patcher as url_mock, svn_patcher:
488 result = summary_view._create_reference_items(
488 result = summary_view._create_reference_items(
489 self.repo, self.repo_full_name, refs, self.ref_type,
489 self.repo, self.repo_full_name, refs, self.ref_type,
490 self._format_function)
490 self._format_function)
491 assert len(result) == amount
491 assert len(result) == amount
492 assert url_mock.call_count == amount
492 assert url_mock.call_count == amount
493
493
494 def test_single_item_details(self, summary_view):
494 def test_single_item_details(self, summary_view):
495 ref_name = 'ref1'
495 ref_name = 'ref1'
496 ref_id = 'deadbeef'
496 ref_id = 'deadbeef'
497 refs = {
497 refs = {
498 ref_name: ref_id
498 ref_name: ref_id
499 }
499 }
500
500
501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
502 return_value=False)
502 return_value=False)
503
503
504 url_patcher = mock.patch.object(
504 url_patcher = mock.patch.object(
505 summary_view, '_create_files_url', return_value=self.fake_url)
505 summary_view, '_create_files_url', return_value=self.fake_url)
506
506
507 with url_patcher as url_mock, svn_patcher:
507 with url_patcher as url_mock, svn_patcher:
508 result = summary_view._create_reference_items(
508 result = summary_view._create_reference_items(
509 self.repo, self.repo_full_name, refs, self.ref_type,
509 self.repo, self.repo_full_name, refs, self.ref_type,
510 self._format_function)
510 self._format_function)
511
511
512 url_mock.assert_called_once_with(
512 url_mock.assert_called_once_with(
513 self.repo, self.repo_full_name, ref_name, ref_id, False)
513 self.repo, self.repo_full_name, ref_name, ref_id, False)
514 expected_result = [
514 expected_result = [
515 {
515 {
516 'text': ref_name,
516 'text': ref_name,
517 'id': self._format_function(ref_name, ref_id),
517 'id': self._format_function(ref_name, ref_id),
518 'raw_id': ref_id,
518 'raw_id': ref_id,
519 'idx': 0,
519 'idx': 0,
520 'type': self.ref_type,
520 'type': self.ref_type,
521 'files_url': self.fake_url
521 'files_url': self.fake_url
522 }
522 }
523 ]
523 ]
524 assert result == expected_result
524 assert result == expected_result
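
The tests above consistently replace the view's URL builder with mock.patch.object and module-level helpers with mock.patch, so that only the method under test actually executes. A minimal, self-contained sketch of that patching pattern, using invented names (View, build_url and items are not part of RhodeCode):

    import mock

    class View(object):
        def build_url(self, name):
            # stand-in for the real URL builder; the test replaces it with a mock
            raise RuntimeError('not expected to run inside the test')

        def items(self, names):
            return [{'text': name, 'url': self.build_url(name)} for name in names]

    def test_items_use_the_patched_url_builder():
        view = View()
        with mock.patch.object(view, 'build_url', return_value='/fake/') as url_mock:
            result = view.items(['a', 'b'])
        assert url_mock.call_count == 2
        assert result == [{'text': 'a', 'url': '/fake/'},
                          {'text': 'b', 'url': '/fake/'}]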
@@ -1,70 +1,70 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import pytest
22 import pytest
23 from pyramid.compat import configparser
23 from pyramid.compat import configparser
24
24
25 from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
25 from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27
27
28
28
29 @pytest.fixture
29 @pytest.fixture()
30 def dummy_conf_file(tmpdir):
30 def dummy_conf_file(tmpdir):
31 conf = configparser.ConfigParser()
31 conf = configparser.ConfigParser()
32 conf.add_section('app:main')
32 conf.add_section('app:main')
33 conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
33 conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
34 conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
34 conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
35 conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')
35 conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')
36
36
37 f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
37 f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
38 with open(f_path, 'wb') as f:
38 with open(f_path, 'wb') as f:
39 conf.write(f)
39 conf.write(f)
40
40
41 return os.path.join(f_path)
41 return os.path.join(f_path)
42
42
43
43
44 def plain_dummy_env():
44 def plain_dummy_env():
45 return {
45 return {
46 'request':
46 'request':
47 AttributeDict(host_url='http://localhost', script_name='/')
47 AttributeDict(host_url='http://localhost', script_name='/')
48 }
48 }
49
49
50
50
51 @pytest.fixture
51 @pytest.fixture()
52 def dummy_env():
52 def dummy_env():
53 return plain_dummy_env()
53 return plain_dummy_env()
54
54
55
55
56 def plain_dummy_user():
56 def plain_dummy_user():
57 return AttributeDict(username='test_user')
57 return AttributeDict(username='test_user')
58
58
59
59
60 @pytest.fixture
60 @pytest.fixture()
61 def dummy_user():
61 def dummy_user():
62 return plain_dummy_user()
62 return plain_dummy_user()
63
63
64
64
65 @pytest.fixture
65 @pytest.fixture()
66 def ssh_wrapper(app, dummy_conf_file, dummy_env):
66 def ssh_wrapper(app, dummy_conf_file, dummy_env):
67 conn_info = '127.0.0.1 22 10.0.0.1 443'
67 conn_info = '127.0.0.1 22 10.0.0.1 443'
68 return SshWrapper(
68 return SshWrapper(
69 'random command', conn_info, 'auto', 'admin', '1', key_id='1',
69 'random command', conn_info, 'auto', 'admin', '1', key_id='1',
70 shell=False, ini_path=dummy_conf_file, env=dummy_env)
70 shell=False, ini_path=dummy_conf_file, env=dummy_env)
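
The only functional content of this hunk is the decorator form: pytest treats a bare @pytest.fixture and a called @pytest.fixture() identically, and the changeset simply standardises on the called form so the syntax stays uniform once arguments such as scope or params are added. A small hedged illustration with invented fixture names:

    import pytest

    @pytest.fixture()                 # called form, as used throughout this changeset
    def sample_user():
        return {'username': 'test_user'}

    @pytest.fixture(scope='module')   # the called form extends naturally to arguments
    def sample_env():
        return {'request': None}

    def test_sample(sample_user, sample_env):
        assert sample_user['username'] == 'test_user'
        assert 'request' in sample_env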
@@ -1,152 +1,152 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
25 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
27
27
28
28
29 class GitServerCreator(object):
29 class GitServerCreator(object):
30 root = '/tmp/repo/path/'
30 root = '/tmp/repo/path/'
31 git_path = '/usr/local/bin/git'
31 git_path = '/usr/local/bin/git'
32 config_data = {
32 config_data = {
33 'app:main': {
33 'app:main': {
34 'ssh.executable.git': git_path,
34 'ssh.executable.git': git_path,
35 'vcs.hooks.protocol': 'http',
35 'vcs.hooks.protocol': 'http',
36 }
36 }
37 }
37 }
38 repo_name = 'test_git'
38 repo_name = 'test_git'
39 repo_mode = 'receive-pack'
39 repo_mode = 'receive-pack'
40 user = plain_dummy_user()
40 user = plain_dummy_user()
41
41
42 def __init__(self):
42 def __init__(self):
43 def config_get(part, key):
43 def config_get(part, key):
44 return self.config_data.get(part, {}).get(key)
44 return self.config_data.get(part, {}).get(key)
45 self.config_mock = mock.Mock()
45 self.config_mock = mock.Mock()
46 self.config_mock.get = mock.Mock(side_effect=config_get)
46 self.config_mock.get = mock.Mock(side_effect=config_get)
47
47
48 def create(self, **kwargs):
48 def create(self, **kwargs):
49 parameters = {
49 parameters = {
50 'store': self.root,
50 'store': self.root,
51 'ini_path': '',
51 'ini_path': '',
52 'user': self.user,
52 'user': self.user,
53 'repo_name': self.repo_name,
53 'repo_name': self.repo_name,
54 'repo_mode': self.repo_mode,
54 'repo_mode': self.repo_mode,
55 'user_permissions': {
55 'user_permissions': {
56 self.repo_name: 'repository.admin'
56 self.repo_name: 'repository.admin'
57 },
57 },
58 'config': self.config_mock,
58 'config': self.config_mock,
59 'env': plain_dummy_env()
59 'env': plain_dummy_env()
60 }
60 }
61 parameters.update(kwargs)
61 parameters.update(kwargs)
62 server = GitServer(**parameters)
62 server = GitServer(**parameters)
63 return server
63 return server
64
64
65
65
66 @pytest.fixture
66 @pytest.fixture()
67 def git_server(app):
67 def git_server(app):
68 return GitServerCreator()
68 return GitServerCreator()
69
69
70
70
71 class TestGitServer(object):
71 class TestGitServer(object):
72
72
73 def test_command(self, git_server):
73 def test_command(self, git_server):
74 server = git_server.create()
74 server = git_server.create()
75 expected_command = (
75 expected_command = (
76 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
76 'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
77 root=git_server.root, git_path=git_server.git_path,
77 root=git_server.root, git_path=git_server.git_path,
78 repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
78 repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
79 )
79 )
80 assert expected_command == server.tunnel.command()
80 assert expected_command == server.tunnel.command()
81
81
82 @pytest.mark.parametrize('permissions, action, code', [
82 @pytest.mark.parametrize('permissions, action, code', [
83 ({}, 'pull', -2),
83 ({}, 'pull', -2),
84 ({'test_git': 'repository.read'}, 'pull', 0),
84 ({'test_git': 'repository.read'}, 'pull', 0),
85 ({'test_git': 'repository.read'}, 'push', -2),
85 ({'test_git': 'repository.read'}, 'push', -2),
86 ({'test_git': 'repository.write'}, 'push', 0),
86 ({'test_git': 'repository.write'}, 'push', 0),
87 ({'test_git': 'repository.admin'}, 'push', 0),
87 ({'test_git': 'repository.admin'}, 'push', 0),
88
88
89 ])
89 ])
90 def test_permission_checks(self, git_server, permissions, action, code):
90 def test_permission_checks(self, git_server, permissions, action, code):
91 server = git_server.create(user_permissions=permissions)
91 server = git_server.create(user_permissions=permissions)
92 result = server._check_permissions(action)
92 result = server._check_permissions(action)
93 assert result is code
93 assert result is code
94
94
95 @pytest.mark.parametrize('permissions, value', [
95 @pytest.mark.parametrize('permissions, value', [
96 ({}, False),
96 ({}, False),
97 ({'test_git': 'repository.read'}, False),
97 ({'test_git': 'repository.read'}, False),
98 ({'test_git': 'repository.write'}, True),
98 ({'test_git': 'repository.write'}, True),
99 ({'test_git': 'repository.admin'}, True),
99 ({'test_git': 'repository.admin'}, True),
100
100
101 ])
101 ])
102 def test_has_write_permissions(self, git_server, permissions, value):
102 def test_has_write_permissions(self, git_server, permissions, value):
103 server = git_server.create(user_permissions=permissions)
103 server = git_server.create(user_permissions=permissions)
104 result = server.has_write_perm()
104 result = server.has_write_perm()
105 assert result is value
105 assert result is value
106
106
107 def test_run_returns_executes_command(self, git_server):
107 def test_run_returns_executes_command(self, git_server):
108 server = git_server.create()
108 server = git_server.create()
109 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
109 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
110 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
110 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
111 _patch.return_value = 0
111 _patch.return_value = 0
112 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
112 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
113 exit_code = server.run()
113 exit_code = server.run()
114
114
115 assert exit_code == (0, False)
115 assert exit_code == (0, False)
116
116
117 @pytest.mark.parametrize(
117 @pytest.mark.parametrize(
118 'repo_mode, action', [
118 'repo_mode, action', [
119 ['receive-pack', 'push'],
119 ['receive-pack', 'push'],
120 ['upload-pack', 'pull']
120 ['upload-pack', 'pull']
121 ])
121 ])
122 def test_update_environment(self, git_server, repo_mode, action):
122 def test_update_environment(self, git_server, repo_mode, action):
123 server = git_server.create(repo_mode=repo_mode)
123 server = git_server.create(repo_mode=repo_mode)
124 store = server.store
124 store = server.store
125
125
126 with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
126 with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
127 with mock.patch('os.putenv') as putenv_mock:
127 with mock.patch('os.putenv') as putenv_mock:
128 server.update_environment(action)
128 server.update_environment(action)
129
129
130 expected_data = {
130 expected_data = {
131 'username': git_server.user.username,
131 'username': git_server.user.username,
132 'user_id': git_server.user.user_id,
132 'user_id': git_server.user.user_id,
133 'scm': 'git',
133 'scm': 'git',
134 'repository': git_server.repo_name,
134 'repository': git_server.repo_name,
135 'make_lock': None,
135 'make_lock': None,
136 'action': action,
136 'action': action,
137 'ip': '10.10.10.10',
137 'ip': '10.10.10.10',
138 'locked_by': [None, None],
138 'locked_by': [None, None],
139 'config': '',
139 'config': '',
140 'repo_store': store,
140 'repo_store': store,
141 'server_url': None,
141 'server_url': None,
142 'hooks': ['push', 'pull'],
142 'hooks': ['push', 'pull'],
143 'is_shadow_repo': False,
143 'is_shadow_repo': False,
144 'hooks_module': 'rhodecode.lib.hooks_daemon',
144 'hooks_module': 'rhodecode.lib.hooks_daemon',
145 'check_branch_perms': False,
145 'check_branch_perms': False,
146 'detect_force_push': False,
146 'detect_force_push': False,
147 'user_agent': u'ssh-user-agent',
147 'user_agent': u'ssh-user-agent',
148 'SSH': True,
148 'SSH': True,
149 'SSH_PERMISSIONS': 'repository.admin',
149 'SSH_PERMISSIONS': 'repository.admin',
150 }
150 }
151 args, kwargs = putenv_mock.call_args
151 args, kwargs = putenv_mock.call_args
152 assert json.loads(args[1]) == expected_data
152 assert json.loads(args[1]) == expected_data
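
The final assertion above decodes the JSON document that the server exports through os.putenv for the VCS hooks. A reduced sketch of that round trip; the helper function and the environment key RC_HOOK_DATA are invented for illustration (the real key is defined by the server code, not shown here):

    import json
    import os

    import mock

    def update_environment(payload):
        # serialise the hook data and export it via os.putenv, as the server code does
        os.putenv('RC_HOOK_DATA', json.dumps(payload))

    def test_payload_roundtrip():
        with mock.patch('os.putenv') as putenv_mock:
            update_environment({'action': 'push', 'ip': '10.10.10.10'})
        key, value = putenv_mock.call_args[0]
        assert key == 'RC_HOOK_DATA'
        assert json.loads(value) == {'action': 'push', 'ip': '10.10.10.10'}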
@@ -1,119 +1,119 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
25 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
27
27
28
28
29 class MercurialServerCreator(object):
29 class MercurialServerCreator(object):
30 root = '/tmp/repo/path/'
30 root = '/tmp/repo/path/'
31 hg_path = '/usr/local/bin/hg'
31 hg_path = '/usr/local/bin/hg'
32
32
33 config_data = {
33 config_data = {
34 'app:main': {
34 'app:main': {
35 'ssh.executable.hg': hg_path,
35 'ssh.executable.hg': hg_path,
36 'vcs.hooks.protocol': 'http',
36 'vcs.hooks.protocol': 'http',
37 }
37 }
38 }
38 }
39 repo_name = 'test_hg'
39 repo_name = 'test_hg'
40 user = plain_dummy_user()
40 user = plain_dummy_user()
41
41
42 def __init__(self):
42 def __init__(self):
43 def config_get(part, key):
43 def config_get(part, key):
44 return self.config_data.get(part, {}).get(key)
44 return self.config_data.get(part, {}).get(key)
45 self.config_mock = mock.Mock()
45 self.config_mock = mock.Mock()
46 self.config_mock.get = mock.Mock(side_effect=config_get)
46 self.config_mock.get = mock.Mock(side_effect=config_get)
47
47
48 def create(self, **kwargs):
48 def create(self, **kwargs):
49 parameters = {
49 parameters = {
50 'store': self.root,
50 'store': self.root,
51 'ini_path': '',
51 'ini_path': '',
52 'user': self.user,
52 'user': self.user,
53 'repo_name': self.repo_name,
53 'repo_name': self.repo_name,
54 'user_permissions': {
54 'user_permissions': {
55 'test_hg': 'repository.admin'
55 'test_hg': 'repository.admin'
56 },
56 },
57 'config': self.config_mock,
57 'config': self.config_mock,
58 'env': plain_dummy_env()
58 'env': plain_dummy_env()
59 }
59 }
60 parameters.update(kwargs)
60 parameters.update(kwargs)
61 server = MercurialServer(**parameters)
61 server = MercurialServer(**parameters)
62 return server
62 return server
63
63
64
64
65 @pytest.fixture
65 @pytest.fixture()
66 def hg_server(app):
66 def hg_server(app):
67 return MercurialServerCreator()
67 return MercurialServerCreator()
68
68
69
69
70 class TestMercurialServer(object):
70 class TestMercurialServer(object):
71
71
72 def test_command(self, hg_server, tmpdir):
72 def test_command(self, hg_server, tmpdir):
73 server = hg_server.create()
73 server = hg_server.create()
74 custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
74 custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
75 expected_command = (
75 expected_command = (
76 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
76 'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
77 root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
77 root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
78 repo_name=hg_server.repo_name)
78 repo_name=hg_server.repo_name)
79 )
79 )
80 server_command = server.tunnel.command(custom_hgrc)
80 server_command = server.tunnel.command(custom_hgrc)
81 assert expected_command == server_command
81 assert expected_command == server_command
82
82
83 @pytest.mark.parametrize('permissions, action, code', [
83 @pytest.mark.parametrize('permissions, action, code', [
84 ({}, 'pull', -2),
84 ({}, 'pull', -2),
85 ({'test_hg': 'repository.read'}, 'pull', 0),
85 ({'test_hg': 'repository.read'}, 'pull', 0),
86 ({'test_hg': 'repository.read'}, 'push', -2),
86 ({'test_hg': 'repository.read'}, 'push', -2),
87 ({'test_hg': 'repository.write'}, 'push', 0),
87 ({'test_hg': 'repository.write'}, 'push', 0),
88 ({'test_hg': 'repository.admin'}, 'push', 0),
88 ({'test_hg': 'repository.admin'}, 'push', 0),
89
89
90 ])
90 ])
91 def test_permission_checks(self, hg_server, permissions, action, code):
91 def test_permission_checks(self, hg_server, permissions, action, code):
92 server = hg_server.create(user_permissions=permissions)
92 server = hg_server.create(user_permissions=permissions)
93 result = server._check_permissions(action)
93 result = server._check_permissions(action)
94 assert result is code
94 assert result is code
95
95
96 @pytest.mark.parametrize('permissions, value', [
96 @pytest.mark.parametrize('permissions, value', [
97 ({}, False),
97 ({}, False),
98 ({'test_hg': 'repository.read'}, False),
98 ({'test_hg': 'repository.read'}, False),
99 ({'test_hg': 'repository.write'}, True),
99 ({'test_hg': 'repository.write'}, True),
100 ({'test_hg': 'repository.admin'}, True),
100 ({'test_hg': 'repository.admin'}, True),
101
101
102 ])
102 ])
103 def test_has_write_permissions(self, hg_server, permissions, value):
103 def test_has_write_permissions(self, hg_server, permissions, value):
104 server = hg_server.create(user_permissions=permissions)
104 server = hg_server.create(user_permissions=permissions)
105 result = server.has_write_perm()
105 result = server.has_write_perm()
106 assert result is value
106 assert result is value
107
107
108 def test_run_returns_executes_command(self, hg_server):
108 def test_run_returns_executes_command(self, hg_server):
109 server = hg_server.create()
109 server = hg_server.create()
110 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
110 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
111 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
111 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
112 _patch.return_value = 0
112 _patch.return_value = 0
113 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
113 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
114 exit_code = server.run()
114 exit_code = server.run()
115
115
116 assert exit_code == (0, False)
116 assert exit_code == (0, False)
117
117
118
118
119
119
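
Both server test modules build their subject through a small factory whose create() merges per-test keyword overrides into a dict of defaults, so each parametrised case only states the permissions it cares about. A stripped-down sketch of that pattern, with names invented for the example:

    class ServerCreator(object):
        # defaults shared by every test in the module
        defaults = {'repo_name': 'test_repo', 'user_permissions': {}}

        def create(self, **overrides):
            params = dict(self.defaults)
            params.update(overrides)
            # the real creators pass these parameters on to the server class
            return params

    def test_override_only_what_matters():
        server = ServerCreator().create(
            user_permissions={'test_repo': 'repository.read'})
        assert server['repo_name'] == 'test_repo'
        assert server['user_permissions'] == {'test_repo': 'repository.read'}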
@@ -1,124 +1,124 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
24 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26
26
27
27
28 class SubversionServerCreator(object):
28 class SubversionServerCreator(object):
29 root = '/tmp/repo/path/'
29 root = '/tmp/repo/path/'
30 svn_path = '/usr/local/bin/svnserve'
30 svn_path = '/usr/local/bin/svnserve'
31 config_data = {
31 config_data = {
32 'app:main': {
32 'app:main': {
33 'ssh.executable.svn': svn_path,
33 'ssh.executable.svn': svn_path,
34 'vcs.hooks.protocol': 'http',
34 'vcs.hooks.protocol': 'http',
35 }
35 }
36 }
36 }
37 repo_name = 'test-svn'
37 repo_name = 'test-svn'
38 user = plain_dummy_user()
38 user = plain_dummy_user()
39
39
40 def __init__(self):
40 def __init__(self):
41 def config_get(part, key):
41 def config_get(part, key):
42 return self.config_data.get(part, {}).get(key)
42 return self.config_data.get(part, {}).get(key)
43 self.config_mock = mock.Mock()
43 self.config_mock = mock.Mock()
44 self.config_mock.get = mock.Mock(side_effect=config_get)
44 self.config_mock.get = mock.Mock(side_effect=config_get)
45
45
46 def create(self, **kwargs):
46 def create(self, **kwargs):
47 parameters = {
47 parameters = {
48 'store': self.root,
48 'store': self.root,
49 'repo_name': self.repo_name,
49 'repo_name': self.repo_name,
50 'ini_path': '',
50 'ini_path': '',
51 'user': self.user,
51 'user': self.user,
52 'user_permissions': {
52 'user_permissions': {
53 self.repo_name: 'repository.admin'
53 self.repo_name: 'repository.admin'
54 },
54 },
55 'config': self.config_mock,
55 'config': self.config_mock,
56 'env': plain_dummy_env()
56 'env': plain_dummy_env()
57 }
57 }
58
58
59 parameters.update(kwargs)
59 parameters.update(kwargs)
60 server = SubversionServer(**parameters)
60 server = SubversionServer(**parameters)
61 return server
61 return server
62
62
63
63
64 @pytest.fixture
64 @pytest.fixture()
65 def svn_server(app):
65 def svn_server(app):
66 return SubversionServerCreator()
66 return SubversionServerCreator()
67
67
68
68
69 class TestSubversionServer(object):
69 class TestSubversionServer(object):
70 def test_command(self, svn_server):
70 def test_command(self, svn_server):
71 server = svn_server.create()
71 server = svn_server.create()
72 expected_command = [
72 expected_command = [
73 svn_server.svn_path, '-t', '--config-file',
73 svn_server.svn_path, '-t', '--config-file',
74 server.tunnel.svn_conf_path, '-r', svn_server.root
74 server.tunnel.svn_conf_path, '-r', svn_server.root
75 ]
75 ]
76
76
77 assert expected_command == server.tunnel.command()
77 assert expected_command == server.tunnel.command()
78
78
79 @pytest.mark.parametrize('permissions, action, code', [
79 @pytest.mark.parametrize('permissions, action, code', [
80 ({}, 'pull', -2),
80 ({}, 'pull', -2),
81 ({'test-svn': 'repository.read'}, 'pull', 0),
81 ({'test-svn': 'repository.read'}, 'pull', 0),
82 ({'test-svn': 'repository.read'}, 'push', -2),
82 ({'test-svn': 'repository.read'}, 'push', -2),
83 ({'test-svn': 'repository.write'}, 'push', 0),
83 ({'test-svn': 'repository.write'}, 'push', 0),
84 ({'test-svn': 'repository.admin'}, 'push', 0),
84 ({'test-svn': 'repository.admin'}, 'push', 0),
85
85
86 ])
86 ])
87 def test_permission_checks(self, svn_server, permissions, action, code):
87 def test_permission_checks(self, svn_server, permissions, action, code):
88 server = svn_server.create(user_permissions=permissions)
88 server = svn_server.create(user_permissions=permissions)
89 result = server._check_permissions(action)
89 result = server._check_permissions(action)
90 assert result is code
90 assert result is code
91
91
92 def test_run_returns_executes_command(self, svn_server):
92 def test_run_returns_executes_command(self, svn_server):
93 server = svn_server.create()
93 server = svn_server.create()
94 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
94 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
95 with mock.patch.object(
95 with mock.patch.object(
96 SubversionTunnelWrapper, 'get_first_client_response',
96 SubversionTunnelWrapper, 'get_first_client_response',
97 return_value={'url': 'http://server/test-svn'}):
97 return_value={'url': 'http://server/test-svn'}):
98 with mock.patch.object(
98 with mock.patch.object(
99 SubversionTunnelWrapper, 'patch_first_client_response',
99 SubversionTunnelWrapper, 'patch_first_client_response',
100 return_value=0):
100 return_value=0):
101 with mock.patch.object(
101 with mock.patch.object(
102 SubversionTunnelWrapper, 'sync',
102 SubversionTunnelWrapper, 'sync',
103 return_value=0):
103 return_value=0):
104 with mock.patch.object(
104 with mock.patch.object(
105 SubversionTunnelWrapper, 'command',
105 SubversionTunnelWrapper, 'command',
106 return_value=['date']):
106 return_value=['date']):
107
107
108 exit_code = server.run()
108 exit_code = server.run()
109 # SVN is configured differently, so in our mock environment we get
109 # SVN is configured differently, so in our mock environment we get
110 # None as the return code
110 # None as the return code
111 assert exit_code == (None, False)
111 assert exit_code == (None, False)
112
112
113 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
113 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
114 server = svn_server.create()
114 server = svn_server.create()
115 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
115 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
116 with mock.patch.object(
116 with mock.patch.object(
117 SubversionTunnelWrapper, 'command',
117 SubversionTunnelWrapper, 'command',
118 return_value=['date']):
118 return_value=['date']):
119 with mock.patch.object(
119 with mock.patch.object(
120 SubversionTunnelWrapper, 'get_first_client_response',
120 SubversionTunnelWrapper, 'get_first_client_response',
121 return_value=None):
121 return_value=None):
122 exit_code = server.run()
122 exit_code = server.run()
123
123
124 assert exit_code == (1, False)
124 assert exit_code == (1, False)
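
test_run_returns_executes_command above stacks four nested with blocks for its patches; on Python 2.7 the same patches can also be listed in a single with statement, which reads a little flatter. A hedged sketch of that equivalent formulation, using an invented Tunnel class rather than the real SubversionTunnelWrapper:

    import mock

    class Tunnel(object):
        def command(self):
            return ['svnserve', '-t']

        def sync(self):
            return 1

    def run(tunnel):
        tunnel.command()
        return tunnel.sync()

    def test_flat_patching():
        # equivalent to nesting two separate ``with mock.patch.object(...)`` blocks
        with mock.patch.object(Tunnel, 'command', return_value=['date']), \
                mock.patch.object(Tunnel, 'sync', return_value=0):
            assert run(Tunnel()) == 0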
@@ -1,98 +1,97 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Compatibility patches.
22 Compatibility patches.
23
23
24 Please keep the following principles in mind:
24 Please keep the following principles in mind:
25
25
26 * Keep imports local, so that importing this module does not cause too many
26 * Keep imports local, so that importing this module does not cause too many
27 side effects by itself.
27 side effects by itself.
28
28
29 * Try to make patches idempotent; calling them multiple times should not do
29 * Try to make patches idempotent; calling them multiple times should not do
30 harm. If that is not possible, ensure that the second call explodes.
30 harm. If that is not possible, ensure that the second call explodes.
31
31
32 """
32 """
33
33
34
34
35
36 def inspect_getargspec():
35 def inspect_getargspec():
37 """
36 """
38 Pyramid relies on inspect.getargspec to look up the signature of
37 Pyramid relies on inspect.getargspec to look up the signature of
39 view functions. This is not compatible with Cython, therefore we replace
38 view functions. This is not compatible with Cython, therefore we replace
40 getargspec with a custom version.
39 getargspec with a custom version.
41 The code is inspired by the inspect module from Python 3.4.
40 The code is inspired by the inspect module from Python 3.4.
42 """
41 """
43 import inspect
42 import inspect
44
43
45 def _isCython(func):
44 def _isCython(func):
46 """
45 """
47 Private helper that checks if a function is a cython function.
46 Private helper that checks if a function is a cython function.
48 """
47 """
49 return func.__class__.__name__ == 'cython_function_or_method'
48 return func.__class__.__name__ == 'cython_function_or_method'
50
49
51 def unwrap(func):
50 def unwrap(func):
52 """
51 """
53 Get the object wrapped by *func*.
52 Get the object wrapped by *func*.
54
53
55 Follows the chain of :attr:`__wrapped__` attributes returning the last
54 Follows the chain of :attr:`__wrapped__` attributes returning the last
56 object in the chain.
55 object in the chain.
57
56
58 *stop* is an optional callback accepting an object in the wrapper chain
57 *stop* is an optional callback accepting an object in the wrapper chain
59 as its sole argument that allows the unwrapping to be terminated early
58 as its sole argument that allows the unwrapping to be terminated early
60 if the callback returns a true value. If the callback never returns a
59 if the callback returns a true value. If the callback never returns a
61 true value, the last object in the chain is returned as usual. For
60 true value, the last object in the chain is returned as usual. For
62 example, :func:`signature` uses this to stop unwrapping if any object
61 example, :func:`signature` uses this to stop unwrapping if any object
63 in the chain has a ``__signature__`` attribute defined.
62 in the chain has a ``__signature__`` attribute defined.
64
63
65 :exc:`ValueError` is raised if a cycle is encountered.
64 :exc:`ValueError` is raised if a cycle is encountered.
66 """
65 """
67 f = func # remember the original func for error reporting
66 f = func # remember the original func for error reporting
68 memo = {id(f)} # Memoise by id to tolerate non-hashable objects
67 memo = {id(f)} # Memoise by id to tolerate non-hashable objects
69 while hasattr(func, '__wrapped__'):
68 while hasattr(func, '__wrapped__'):
70 func = func.__wrapped__
69 func = func.__wrapped__
71 id_func = id(func)
70 id_func = id(func)
72 if id_func in memo:
71 if id_func in memo:
73 raise ValueError('wrapper loop when unwrapping {!r}'.format(f))
72 raise ValueError('wrapper loop when unwrapping {!r}'.format(f))
74 memo.add(id_func)
73 memo.add(id_func)
75 return func
74 return func
76
75
77 def custom_getargspec(func):
76 def custom_getargspec(func):
78 """
77 """
79 Get the names and default values of a function's arguments.
78 Get the names and default values of a function's arguments.
80
79
81 A tuple of four things is returned: (args, varargs, varkw, defaults).
80 A tuple of four things is returned: (args, varargs, varkw, defaults).
82 'args' is a list of the argument names (it may contain nested lists).
81 'args' is a list of the argument names (it may contain nested lists).
83 'varargs' and 'varkw' are the names of the * and ** arguments or None.
82 'varargs' and 'varkw' are the names of the * and ** arguments or None.
84 'defaults' is an n-tuple of the default values of the last n arguments.
83 'defaults' is an n-tuple of the default values of the last n arguments.
85 """
84 """
86
85
87 func = unwrap(func)
86 func = unwrap(func)
88
87
89 if inspect.ismethod(func):
88 if inspect.ismethod(func):
90 func = func.im_func
89 func = func.im_func
91 if not inspect.isfunction(func):
90 if not inspect.isfunction(func):
92 if not _isCython(func):
91 if not _isCython(func):
93 raise TypeError('{!r} is not a Python or Cython function'
92 raise TypeError('{!r} is not a Python or Cython function'
94 .format(func))
93 .format(func))
95 args, varargs, varkw = inspect.getargs(func.func_code)
94 args, varargs, varkw = inspect.getargs(func.func_code)
96 return inspect.ArgSpec(args, varargs, varkw, func.func_defaults)
95 return inspect.ArgSpec(args, varargs, varkw, func.func_defaults)
97
96
98 inspect.getargspec = custom_getargspec
97 inspect.getargspec = custom_getargspec
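
The unwrap() helper in this file walks the chain of __wrapped__ attributes that decorators leave behind, guarding against cycles. A small, self-contained demonstration of the behaviour it relies on, independent of the Pyramid/Cython patch itself (decorate and add are made up for the example):

    def decorate(func):
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        # remember the wrapped function, as functools.wraps does on Python 3
        wrapper.__wrapped__ = func
        return wrapper

    @decorate
    @decorate
    def add(a, b):
        return a + b

    # following the __wrapped__ chain, as unwrap() does, recovers the original
    unwrapped = add
    while hasattr(unwrapped, '__wrapped__'):
        unwrapped = unwrapped.__wrapped__
    assert unwrapped.__name__ == 'add'
    assert unwrapped(2, 3) == 5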
@@ -1,53 +1,53 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.config import environment
25 from rhodecode.config import environment
26
26
27
27
28 @pytest.fixture
28 @pytest.fixture()
29 def _external_calls_patcher(request):
29 def _external_calls_patcher(request):
30 # TODO: mikhail: This is a temporary solution. Ideally load_environment
30 # TODO: mikhail: This is a temporary solution. Ideally load_environment
31 # should be split into multiple small testable functions.
31 # should be split into multiple small testable functions.
32 utils_patcher = mock.patch.object(environment, 'utils')
32 utils_patcher = mock.patch.object(environment, 'utils')
33
33
34 rhodecode_patcher = mock.patch.object(environment, 'rhodecode')
34 rhodecode_patcher = mock.patch.object(environment, 'rhodecode')
35
35
36 db_config = mock.Mock()
36 db_config = mock.Mock()
37 db_config.items.return_value = {
37 db_config.items.return_value = {
38 'paths': [['/tmp/abc', '/tmp/def']]
38 'paths': [['/tmp/abc', '/tmp/def']]
39 }
39 }
40 db_config_patcher = mock.patch.object(
40 db_config_patcher = mock.patch.object(
41 environment, 'make_db_config', return_value=db_config)
41 environment, 'make_db_config', return_value=db_config)
42
42
43 set_config_patcher = mock.patch.object(environment, 'set_rhodecode_config')
43 set_config_patcher = mock.patch.object(environment, 'set_rhodecode_config')
44
44
45 utils_patcher.start()
45 utils_patcher.start()
46 rhodecode_patcher.start()
46 rhodecode_patcher.start()
47 db_config_patcher.start()
47 db_config_patcher.start()
48 set_config_patcher.start()
48 set_config_patcher.start()
49
49
50 request.addfinalizer(utils_patcher.stop)
50 request.addfinalizer(utils_patcher.stop)
51 request.addfinalizer(rhodecode_patcher.stop)
51 request.addfinalizer(rhodecode_patcher.stop)
52 request.addfinalizer(db_config_patcher.stop)
52 request.addfinalizer(db_config_patcher.stop)
53 request.addfinalizer(set_config_patcher.stop)
53 request.addfinalizer(set_config_patcher.stop)
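
_external_calls_patcher starts four patchers and registers an addfinalizer for each stop call. An equivalent, somewhat more compact style is a yield fixture, where everything after the yield runs at teardown; shown here only as a hedged alternative, not as part of this changeset, and os.path.isdir is just a convenient stand-in target:

    import mock
    import pytest

    @pytest.fixture()
    def patched_isdir():
        patcher = mock.patch('os.path.isdir', return_value=True)
        patcher.start()
        yield patcher
        patcher.stop()   # teardown, equivalent to request.addfinalizer(patcher.stop)

    def test_uses_patched_isdir(patched_isdir):
        import os.path
        assert os.path.isdir('/definitely/not/there') is True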
@@ -1,292 +1,292 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from subprocess32 import Popen, PIPE
21 from subprocess32 import Popen, PIPE
22 import os
22 import os
23 import shutil
23 import shutil
24 import sys
24 import sys
25 import tempfile
25 import tempfile
26
26
27 import pytest
27 import pytest
28 from sqlalchemy.engine import url
28 from sqlalchemy.engine import url
29
29
30 from rhodecode.tests.fixture import TestINI
30 from rhodecode.tests.fixture import TestINI
31
31
32
32
33 def _get_dbs_from_metafunc(metafunc):
33 def _get_dbs_from_metafunc(metafunc):
34 if hasattr(metafunc.function, 'dbs'):
34 if hasattr(metafunc.function, 'dbs'):
35 # Backends supported by this test function, taken from
35 # Backends supported by this test function, taken from
36 # pytest.mark.dbs
36 # pytest.mark.dbs
37 backends = metafunc.definition.get_closest_marker('dbs').args
37 backends = metafunc.definition.get_closest_marker('dbs').args
38 else:
38 else:
39 backends = metafunc.config.getoption('--dbs')
39 backends = metafunc.config.getoption('--dbs')
40 return backends
40 return backends
41
41
42
42
43 def pytest_generate_tests(metafunc):
43 def pytest_generate_tests(metafunc):
44 # Support test generation based on --dbs parameter
44 # Support test generation based on --dbs parameter
45 if 'db_backend' in metafunc.fixturenames:
45 if 'db_backend' in metafunc.fixturenames:
46 requested_backends = set(metafunc.config.getoption('--dbs'))
46 requested_backends = set(metafunc.config.getoption('--dbs'))
47 backends = _get_dbs_from_metafunc(metafunc)
47 backends = _get_dbs_from_metafunc(metafunc)
48 backends = requested_backends.intersection(backends)
48 backends = requested_backends.intersection(backends)
49 # TODO: johbo: Disabling a backend did not work out with
49 # TODO: johbo: Disabling a backend did not work out with
50 # parametrization, find better way to achieve this.
50 # parametrization, find better way to achieve this.
51 if not backends:
51 if not backends:
52 metafunc.function._skip = True
52 metafunc.function._skip = True
53 metafunc.parametrize('db_backend_name', backends)
53 metafunc.parametrize('db_backend_name', backends)
54
54
55
55
56 def pytest_collection_modifyitems(session, config, items):
56 def pytest_collection_modifyitems(session, config, items):
57 remaining = [
57 remaining = [
58 i for i in items if not getattr(i.obj, '_skip', False)]
58 i for i in items if not getattr(i.obj, '_skip', False)]
59 items[:] = remaining
59 items[:] = remaining
60
60
61
61
62 @pytest.fixture
62 @pytest.fixture()
63 def db_backend(
63 def db_backend(
64 request, db_backend_name, ini_config, tmpdir_factory):
64 request, db_backend_name, ini_config, tmpdir_factory):
65 basetemp = tmpdir_factory.getbasetemp().strpath
65 basetemp = tmpdir_factory.getbasetemp().strpath
66 klass = _get_backend(db_backend_name)
66 klass = _get_backend(db_backend_name)
67
67
68 option_name = '--{}-connection-string'.format(db_backend_name)
68 option_name = '--{}-connection-string'.format(db_backend_name)
69 connection_string = request.config.getoption(option_name) or None
69 connection_string = request.config.getoption(option_name) or None
70
70
71 return klass(
71 return klass(
72 config_file=ini_config, basetemp=basetemp,
72 config_file=ini_config, basetemp=basetemp,
73 connection_string=connection_string)
73 connection_string=connection_string)
74
74
75
75
76 def _get_backend(backend_type):
76 def _get_backend(backend_type):
77 return {
77 return {
78 'sqlite': SQLiteDBBackend,
78 'sqlite': SQLiteDBBackend,
79 'postgres': PostgresDBBackend,
79 'postgres': PostgresDBBackend,
80 'mysql': MySQLDBBackend,
80 'mysql': MySQLDBBackend,
81 '': EmptyDBBackend
81 '': EmptyDBBackend
82 }[backend_type]
82 }[backend_type]
83
83
84
84
85 class DBBackend(object):
85 class DBBackend(object):
86 _store = os.path.dirname(os.path.abspath(__file__))
86 _store = os.path.dirname(os.path.abspath(__file__))
87 _type = None
87 _type = None
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
89 'startup.import_repos': 'false',
89 'startup.import_repos': 'false',
90 'is_test': 'False'}}]
90 'is_test': 'False'}}]
91 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
91 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
92 _base_db_name = 'rhodecode_test_db_backend'
92 _base_db_name = 'rhodecode_test_db_backend'
93
93
94 def __init__(
94 def __init__(
95 self, config_file, db_name=None, basetemp=None,
95 self, config_file, db_name=None, basetemp=None,
96 connection_string=None):
96 connection_string=None):
97
97
98 from rhodecode.lib.vcs.backends.hg import largefiles_store
98 from rhodecode.lib.vcs.backends.hg import largefiles_store
99 from rhodecode.lib.vcs.backends.git import lfs_store
99 from rhodecode.lib.vcs.backends.git import lfs_store
100
100
101 self.fixture_store = os.path.join(self._store, self._type)
101 self.fixture_store = os.path.join(self._store, self._type)
102 self.db_name = db_name or self._base_db_name
102 self.db_name = db_name or self._base_db_name
103 self._base_ini_file = config_file
103 self._base_ini_file = config_file
104 self.stderr = ''
104 self.stderr = ''
105 self.stdout = ''
105 self.stdout = ''
106 self._basetemp = basetemp or tempfile.gettempdir()
106 self._basetemp = basetemp or tempfile.gettempdir()
107 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
107 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
108 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
108 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
109 self._repos_git_lfs_store = lfs_store(self._basetemp)
109 self._repos_git_lfs_store = lfs_store(self._basetemp)
110 self.connection_string = connection_string
110 self.connection_string = connection_string
111
111
112 @property
112 @property
113 def connection_string(self):
113 def connection_string(self):
114 return self._connection_string
114 return self._connection_string
115
115
116 @connection_string.setter
116 @connection_string.setter
117 def connection_string(self, new_connection_string):
117 def connection_string(self, new_connection_string):
118 if not new_connection_string:
118 if not new_connection_string:
119 new_connection_string = self.get_default_connection_string()
119 new_connection_string = self.get_default_connection_string()
120 else:
120 else:
121 new_connection_string = new_connection_string.format(
121 new_connection_string = new_connection_string.format(
122 db_name=self.db_name)
122 db_name=self.db_name)
123 url_parts = url.make_url(new_connection_string)
123 url_parts = url.make_url(new_connection_string)
124 self._connection_string = new_connection_string
124 self._connection_string = new_connection_string
125 self.user = url_parts.username
125 self.user = url_parts.username
126 self.password = url_parts.password
126 self.password = url_parts.password
127 self.host = url_parts.host
127 self.host = url_parts.host
128
128
129 def get_default_connection_string(self):
129 def get_default_connection_string(self):
130 raise NotImplementedError('default connection_string is required.')
130 raise NotImplementedError('default connection_string is required.')
131
131
132 def execute(self, cmd, env=None, *args):
132 def execute(self, cmd, env=None, *args):
133 """
133 """
134 Runs command on the system with given ``args``.
134 Runs command on the system with given ``args``.
135 """
135 """
136
136
137 command = cmd + ' ' + ' '.join(args)
137 command = cmd + ' ' + ' '.join(args)
138 sys.stdout.write(command)
138 sys.stdout.write(command)
139
139
140 # Tell Python to use UTF-8 encoding for stdout output
140 # Tell Python to use UTF-8 encoding for stdout output
141 _env = os.environ.copy()
141 _env = os.environ.copy()
142 _env['PYTHONIOENCODING'] = 'UTF-8'
142 _env['PYTHONIOENCODING'] = 'UTF-8'
143 if env:
143 if env:
144 _env.update(env)
144 _env.update(env)
145 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
145 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
146 self.stdout, self.stderr = self.p.communicate()
146 self.stdout, self.stderr = self.p.communicate()
147 sys.stdout.write('COMMAND:'+command+'\n')
147 sys.stdout.write('COMMAND:'+command+'\n')
148 sys.stdout.write(self.stdout)
148 sys.stdout.write(self.stdout)
149 return self.stdout, self.stderr
149 return self.stdout, self.stderr
150
150
151 def assert_returncode_success(self):
151 def assert_returncode_success(self):
152 if self.p.returncode != 0:
152 if self.p.returncode != 0:
153 print(self.stderr)
153 print(self.stderr)
154 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
154 raise AssertionError('non 0 retcode:{}'.format(self.p.returncode))
155
155
156 def assert_correct_output(self, stdout, version):
156 def assert_correct_output(self, stdout, version):
157 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
157 assert 'UPGRADE FOR STEP {} COMPLETED'.format(version) in stdout
158
158
159 def setup_rhodecode_db(self, ini_params=None, env=None):
159 def setup_rhodecode_db(self, ini_params=None, env=None):
160 if not ini_params:
160 if not ini_params:
161 ini_params = self._base_ini_config
161 ini_params = self._base_ini_config
162
162
163 ini_params.extend(self._db_url)
163 ini_params.extend(self._db_url)
164 with TestINI(self._base_ini_file, ini_params,
164 with TestINI(self._base_ini_file, ini_params,
165 self._type, destroy=True) as _ini_file:
165 self._type, destroy=True) as _ini_file:
166
166
167 if not os.path.isdir(self._repos_location):
167 if not os.path.isdir(self._repos_location):
168 os.makedirs(self._repos_location)
168 os.makedirs(self._repos_location)
169 if not os.path.isdir(self._repos_hg_largefiles_store):
169 if not os.path.isdir(self._repos_hg_largefiles_store):
170 os.makedirs(self._repos_hg_largefiles_store)
170 os.makedirs(self._repos_hg_largefiles_store)
171 if not os.path.isdir(self._repos_git_lfs_store):
171 if not os.path.isdir(self._repos_git_lfs_store):
172 os.makedirs(self._repos_git_lfs_store)
172 os.makedirs(self._repos_git_lfs_store)
173
173
174 return self.execute(
174 return self.execute(
175 "rc-setup-app {0} --user=marcink "
175 "rc-setup-app {0} --user=marcink "
176 "--email=marcin@rhodeocode.com --password={1} "
176 "--email=marcin@rhodeocode.com --password={1} "
177 "--repos={2} --force-yes".format(
177 "--repos={2} --force-yes".format(
178 _ini_file, 'qweqwe', self._repos_location), env=env)
178 _ini_file, 'qweqwe', self._repos_location), env=env)
179
179
180 def upgrade_database(self, ini_params=None):
180 def upgrade_database(self, ini_params=None):
181 if not ini_params:
181 if not ini_params:
182 ini_params = self._base_ini_config
182 ini_params = self._base_ini_config
183 ini_params.extend(self._db_url)
183 ini_params.extend(self._db_url)
184
184
185 test_ini = TestINI(
185 test_ini = TestINI(
186 self._base_ini_file, ini_params, self._type, destroy=True)
186 self._base_ini_file, ini_params, self._type, destroy=True)
187 with test_ini as ini_file:
187 with test_ini as ini_file:
188 if not os.path.isdir(self._repos_location):
188 if not os.path.isdir(self._repos_location):
189 os.makedirs(self._repos_location)
189 os.makedirs(self._repos_location)
190
190
191 return self.execute(
191 return self.execute(
192 "rc-upgrade-db {0} --force-yes".format(ini_file))
192 "rc-upgrade-db {0} --force-yes".format(ini_file))
193
193
194 def setup_db(self):
194 def setup_db(self):
195 raise NotImplementedError
195 raise NotImplementedError
196
196
197 def teardown_db(self):
197 def teardown_db(self):
198 raise NotImplementedError
198 raise NotImplementedError
199
199
200 def import_dump(self, dumpname):
200 def import_dump(self, dumpname):
201 raise NotImplementedError
201 raise NotImplementedError
202
202
203
203
204 class EmptyDBBackend(DBBackend):
204 class EmptyDBBackend(DBBackend):
205 _type = ''
205 _type = ''
206
206
207 def setup_db(self):
207 def setup_db(self):
208 pass
208 pass
209
209
210 def teardown_db(self):
210 def teardown_db(self):
211 pass
211 pass
212
212
213 def import_dump(self, dumpname):
213 def import_dump(self, dumpname):
214 pass
214 pass
215
215
216 def assert_returncode_success(self):
216 def assert_returncode_success(self):
217 assert True
217 assert True
218
218
219
219
220 class SQLiteDBBackend(DBBackend):
220 class SQLiteDBBackend(DBBackend):
221 _type = 'sqlite'
221 _type = 'sqlite'
222
222
223 def get_default_connection_string(self):
223 def get_default_connection_string(self):
224 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
224 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
225
225
226 def setup_db(self):
226 def setup_db(self):
227 # dump schema for tests
227 # dump schema for tests
228 # cp -v $TEST_DB_NAME
228 # cp -v $TEST_DB_NAME
229 self._db_url = [{'app:main': {
229 self._db_url = [{'app:main': {
230 'sqlalchemy.db1.url': self.connection_string}}]
230 'sqlalchemy.db1.url': self.connection_string}}]
231
231
232 def import_dump(self, dumpname):
232 def import_dump(self, dumpname):
233 dump = os.path.join(self.fixture_store, dumpname)
233 dump = os.path.join(self.fixture_store, dumpname)
234 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
234 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
235 return self.execute('cp -v {} {}'.format(dump, target))
235 return self.execute('cp -v {} {}'.format(dump, target))
236
236
237 def teardown_db(self):
237 def teardown_db(self):
238 return self.execute("rm -rf {}.sqlite".format(
238 return self.execute("rm -rf {}.sqlite".format(
239 os.path.join(self._basetemp, self.db_name)))
239 os.path.join(self._basetemp, self.db_name)))
240
240
241
241
242 class MySQLDBBackend(DBBackend):
242 class MySQLDBBackend(DBBackend):
243 _type = 'mysql'
243 _type = 'mysql'
244
244
245 def get_default_connection_string(self):
245 def get_default_connection_string(self):
246 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
246 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
247
247
248 def setup_db(self):
248 def setup_db(self):
249 # dump schema for tests
249 # dump schema for tests
250 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
250 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
251 self._db_url = [{'app:main': {
251 self._db_url = [{'app:main': {
252 'sqlalchemy.db1.url': self.connection_string}}]
252 'sqlalchemy.db1.url': self.connection_string}}]
253 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
253 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
254 self.user, self.password, self.db_name))
254 self.user, self.password, self.db_name))
255
255
256 def import_dump(self, dumpname):
256 def import_dump(self, dumpname):
257 dump = os.path.join(self.fixture_store, dumpname)
257 dump = os.path.join(self.fixture_store, dumpname)
258 return self.execute("mysql -u{} -p{} {} < {}".format(
258 return self.execute("mysql -u{} -p{} {} < {}".format(
259 self.user, self.password, self.db_name, dump))
259 self.user, self.password, self.db_name, dump))
260
260
261 def teardown_db(self):
261 def teardown_db(self):
262 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
262 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
263 self.user, self.password, self.db_name))
263 self.user, self.password, self.db_name))
264
264
265
265
266 class PostgresDBBackend(DBBackend):
266 class PostgresDBBackend(DBBackend):
267 _type = 'postgres'
267 _type = 'postgres'
268
268
269 def get_default_connection_string(self):
269 def get_default_connection_string(self):
270 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
270 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
271
271
272 def setup_db(self):
272 def setup_db(self):
273 # dump schema for tests
273 # dump schema for tests
274 # pg_dump -U postgres -h localhost $TEST_DB_NAME
274 # pg_dump -U postgres -h localhost $TEST_DB_NAME
275 self._db_url = [{'app:main': {
275 self._db_url = [{'app:main': {
276 'sqlalchemy.db1.url':
276 'sqlalchemy.db1.url':
277 self.connection_string}}]
277 self.connection_string}}]
278 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
278 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
279 "-c 'create database '{}';'".format(
279 "-c 'create database '{}';'".format(
280 self.password, self.user, self.db_name))
280 self.password, self.user, self.db_name))
281
281
282 def teardown_db(self):
282 def teardown_db(self):
283 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
283 return self.execute("PGPASSWORD={} psql -U {} -h localhost "
284 "-c 'drop database if exists '{}';'".format(
284 "-c 'drop database if exists '{}';'".format(
285 self.password, self.user, self.db_name))
285 self.password, self.user, self.db_name))
286
286
287 def import_dump(self, dumpname):
287 def import_dump(self, dumpname):
288 dump = os.path.join(self.fixture_store, dumpname)
288 dump = os.path.join(self.fixture_store, dumpname)
289 return self.execute(
289 return self.execute(
290 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
290 "PGPASSWORD={} psql -U {} -h localhost -d {} -1 "
291 "-f {}".format(
291 "-f {}".format(
292 self.password, self.user, self.db_name, dump))
292 self.password, self.user, self.db_name, dump))
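A minimal end-to-end sketch of how one of the concrete backends above is typically driven in a test run; the helper name `provision_test_db` is illustrative only:

    def provision_test_db(backend, env=None):
        # create the database, install the RhodeCode schema, then drop it again
        backend.setup_db()
        try:
            backend.setup_rhodecode_db(env=env)
            backend.assert_returncode_success()
        finally:
            backend.teardown_db()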
@@ -1,123 +1,123 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests.events.conftest import EventCatcher
23 from rhodecode.tests.events.conftest import EventCatcher
24
24
25 from rhodecode.lib import hooks_base, utils2
25 from rhodecode.lib import hooks_base, utils2
26 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.repo import RepoModel
27 from rhodecode.events.repo import (
27 from rhodecode.events.repo import (
28 RepoPrePullEvent, RepoPullEvent,
28 RepoPrePullEvent, RepoPullEvent,
29 RepoPrePushEvent, RepoPushEvent,
29 RepoPrePushEvent, RepoPushEvent,
30 RepoPreCreateEvent, RepoCreateEvent,
30 RepoPreCreateEvent, RepoCreateEvent,
31 RepoPreDeleteEvent, RepoDeleteEvent,
31 RepoPreDeleteEvent, RepoDeleteEvent,
32 )
32 )
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture()
36 def scm_extras(user_regular, repo_stub):
36 def scm_extras(user_regular, repo_stub):
37 extras = utils2.AttributeDict({
37 extras = utils2.AttributeDict({
38 'ip': '127.0.0.1',
38 'ip': '127.0.0.1',
39 'username': user_regular.username,
39 'username': user_regular.username,
40 'user_id': user_regular.user_id,
40 'user_id': user_regular.user_id,
41 'action': '',
41 'action': '',
42 'repository': repo_stub.repo_name,
42 'repository': repo_stub.repo_name,
43 'scm': repo_stub.scm_instance().alias,
43 'scm': repo_stub.scm_instance().alias,
44 'config': '',
44 'config': '',
45 'repo_store': '',
45 'repo_store': '',
46 'server_url': 'http://example.com',
46 'server_url': 'http://example.com',
47 'make_lock': None,
47 'make_lock': None,
48 'user_agent': 'some-client',
48 'user_agent': 'some-client',
49 'locked_by': [None],
49 'locked_by': [None],
50 'commit_ids': ['a' * 40] * 3,
50 'commit_ids': ['a' * 40] * 3,
51 'hook_type': 'scm_extras_test',
51 'hook_type': 'scm_extras_test',
52 'is_shadow_repo': False,
52 'is_shadow_repo': False,
53 })
53 })
54 return extras
54 return extras
55
55
56
56
57 # TODO: dan: make the serialization tests complete json comparisons
57 # TODO: dan: make the serialization tests complete json comparisons
58 @pytest.mark.parametrize('EventClass', [
58 @pytest.mark.parametrize('EventClass', [
59 RepoPreCreateEvent, RepoCreateEvent,
59 RepoPreCreateEvent, RepoCreateEvent,
60 RepoPreDeleteEvent, RepoDeleteEvent,
60 RepoPreDeleteEvent, RepoDeleteEvent,
61 ])
61 ])
62 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
62 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
63 event = EventClass(repo_stub)
63 event = EventClass(repo_stub)
64 data = event.as_dict()
64 data = event.as_dict()
65 assert data['name'] == EventClass.name
65 assert data['name'] == EventClass.name
66 assert data['repo']['repo_name'] == repo_stub.repo_name
66 assert data['repo']['repo_name'] == repo_stub.repo_name
67 assert data['repo']['url']
67 assert data['repo']['url']
68 assert data['repo']['permalink_url']
68 assert data['repo']['permalink_url']
69
69
70
70
71 @pytest.mark.parametrize('EventClass', [
71 @pytest.mark.parametrize('EventClass', [
72 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
72 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
73 ])
73 ])
74 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
74 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
75 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
75 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
76 data = event.as_dict()
76 data = event.as_dict()
77 assert data['name'] == EventClass.name
77 assert data['name'] == EventClass.name
78 assert data['repo']['repo_name'] == repo_stub.repo_name
78 assert data['repo']['repo_name'] == repo_stub.repo_name
79 assert data['repo']['url']
79 assert data['repo']['url']
80 assert data['repo']['permalink_url']
80 assert data['repo']['permalink_url']
81
81
82
82
83 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
83 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
84 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
84 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
85 event = EventClass(repo_name=repo_stub.repo_name,
85 event = EventClass(repo_name=repo_stub.repo_name,
86 pushed_commit_ids=scm_extras['commit_ids'],
86 pushed_commit_ids=scm_extras['commit_ids'],
87 extras=scm_extras)
87 extras=scm_extras)
88 data = event.as_dict()
88 data = event.as_dict()
89 assert data['name'] == EventClass.name
89 assert data['name'] == EventClass.name
90 assert data['repo']['repo_name'] == repo_stub.repo_name
90 assert data['repo']['repo_name'] == repo_stub.repo_name
91 assert data['repo']['url']
91 assert data['repo']['url']
92 assert data['repo']['permalink_url']
92 assert data['repo']['permalink_url']
93
93
94
94
95 def test_create_delete_repo_fires_events(backend):
95 def test_create_delete_repo_fires_events(backend):
96 with EventCatcher() as event_catcher:
96 with EventCatcher() as event_catcher:
97 repo = backend.create_repo()
97 repo = backend.create_repo()
98 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
98 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
99
99
100 with EventCatcher() as event_catcher:
100 with EventCatcher() as event_catcher:
101 RepoModel().delete(repo)
101 RepoModel().delete(repo)
102 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
102 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
103
103
104
104
105 def test_push_fires_events(scm_extras):
105 def test_push_fires_events(scm_extras):
106 with EventCatcher() as event_catcher:
106 with EventCatcher() as event_catcher:
107 hooks_base.pre_push(scm_extras)
107 hooks_base.pre_push(scm_extras)
108 assert event_catcher.events_types == [RepoPrePushEvent]
108 assert event_catcher.events_types == [RepoPrePushEvent]
109
109
110 with EventCatcher() as event_catcher:
110 with EventCatcher() as event_catcher:
111 hooks_base.post_push(scm_extras)
111 hooks_base.post_push(scm_extras)
112 assert event_catcher.events_types == [RepoPushEvent]
112 assert event_catcher.events_types == [RepoPushEvent]
113
113
114
114
115 def test_pull_fires_events(scm_extras):
115 def test_pull_fires_events(scm_extras):
116 with EventCatcher() as event_catcher:
116 with EventCatcher() as event_catcher:
117 hooks_base.pre_pull(scm_extras)
117 hooks_base.pre_pull(scm_extras)
118 assert event_catcher.events_types == [RepoPrePullEvent]
118 assert event_catcher.events_types == [RepoPrePullEvent]
119
119
120 with EventCatcher() as event_catcher:
120 with EventCatcher() as event_catcher:
121 hooks_base.post_pull(scm_extras)
121 hooks_base.post_pull(scm_extras)
122 assert event_catcher.events_types == [RepoPullEvent]
122 assert event_catcher.events_types == [RepoPullEvent]
123
123
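EventCatcher is imported from rhodecode.tests.events.conftest and is not shown in this changeset. A purely illustrative sketch of the recording idea (not the real implementation, which also subscribes and unsubscribes itself) only needs to note the type of every event it receives:

    class RecordingSubscriber(object):
        """Illustrative only; stands in for the EventCatcher used above."""
        def __init__(self):
            self.events_types = []

        def __call__(self, event):
            self.events_types.append(type(event))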
@@ -1,55 +1,55 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import pytest
22 import pytest
23 from rhodecode import events
23 from rhodecode import events
24 from rhodecode.lib.utils2 import AttributeDict
24 from rhodecode.lib.utils2 import AttributeDict
25
25
26
26
27 @pytest.fixture
27 @pytest.fixture()
28 def repo_push_event(backend, user_regular):
28 def repo_push_event(backend, user_regular):
29 commits = [
29 commits = [
30 {'message': 'ancestor commit fixes #15'},
30 {'message': 'ancestor commit fixes #15'},
31 {'message': 'quick fixes'},
31 {'message': 'quick fixes'},
32 {'message': 'change that fixes #41, #2'},
32 {'message': 'change that fixes #41, #2'},
33 {'message': 'this is because 5b23c3532 broke stuff'},
33 {'message': 'this is because 5b23c3532 broke stuff'},
34 {'message': 'last commit'},
34 {'message': 'last commit'},
35 ]
35 ]
36 commit_ids = backend.create_master_repo(commits).values()
36 commit_ids = backend.create_master_repo(commits).values()
37 repo = backend.create_repo()
37 repo = backend.create_repo()
38 scm_extras = AttributeDict({
38 scm_extras = AttributeDict({
39 'ip': '127.0.0.1',
39 'ip': '127.0.0.1',
40 'username': user_regular.username,
40 'username': user_regular.username,
41 'user_id': user_regular.user_id,
41 'user_id': user_regular.user_id,
42 'action': '',
42 'action': '',
43 'repository': repo.repo_name,
43 'repository': repo.repo_name,
44 'scm': repo.scm_instance().alias,
44 'scm': repo.scm_instance().alias,
45 'config': '',
45 'config': '',
46 'repo_store': '',
46 'repo_store': '',
47 'server_url': 'http://example.com',
47 'server_url': 'http://example.com',
48 'make_lock': None,
48 'make_lock': None,
49 'locked_by': [None],
49 'locked_by': [None],
50 'commit_ids': commit_ids,
50 'commit_ids': commit_ids,
51 })
51 })
52
52
53 return events.RepoPushEvent(repo_name=repo.repo_name,
53 return events.RepoPushEvent(repo_name=repo.repo_name,
54 pushed_commit_ids=commit_ids,
54 pushed_commit_ids=commit_ids,
55 extras=scm_extras)
55 extras=scm_extras)
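A hypothetical test using the repo_push_event fixture above; the test name and assertions are illustrative and mirror the serialization checks seen earlier in this changeset:

    def test_push_event_payload(repo_push_event):
        data = repo_push_event.as_dict()
        assert data['name'] == events.RepoPushEvent.name
        assert data['repo']['repo_name']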
@@ -1,216 +1,216 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import pytest
22 import pytest
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixture import Fixture
26 from rhodecode.model.db import Session, Integration
26 from rhodecode.model.db import Session, Integration
27 from rhodecode.model.integration import IntegrationModel
27 from rhodecode.model.integration import IntegrationModel
28
28
29
29
30 class TestDeleteScopesDeletesIntegrations(object):
30 class TestDeleteScopesDeletesIntegrations(object):
31 def test_delete_repo_with_integration_deletes_integration(
31 def test_delete_repo_with_integration_deletes_integration(
32 self, repo_integration_stub):
32 self, repo_integration_stub):
33
33
34 Session().delete(repo_integration_stub.repo)
34 Session().delete(repo_integration_stub.repo)
35 Session().commit()
35 Session().commit()
36 Session().expire_all()
36 Session().expire_all()
37 integration = Integration.get(repo_integration_stub.integration_id)
37 integration = Integration.get(repo_integration_stub.integration_id)
38 assert integration is None
38 assert integration is None
39
39
40 def test_delete_repo_group_with_integration_deletes_integration(
40 def test_delete_repo_group_with_integration_deletes_integration(
41 self, repogroup_integration_stub):
41 self, repogroup_integration_stub):
42
42
43 Session().delete(repogroup_integration_stub.repo_group)
43 Session().delete(repogroup_integration_stub.repo_group)
44 Session().commit()
44 Session().commit()
45 Session().expire_all()
45 Session().expire_all()
46 integration = Integration.get(repogroup_integration_stub.integration_id)
46 integration = Integration.get(repogroup_integration_stub.integration_id)
47 assert integration is None
47 assert integration is None
48
48
49
49
50 @pytest.fixture
50 @pytest.fixture()
51 def integration_repos(request, StubIntegrationType, stub_integration_settings):
51 def integration_repos(request, StubIntegrationType, stub_integration_settings):
52 """
52 """
53 Create repositories and integrations for testing, and destroy them afterwards.
53 Create repositories and integrations for testing, and destroy them afterwards.
54
54
55 Structure:
55 Structure:
56 root_repo
56 root_repo
57 parent_group/
57 parent_group/
58 parent_repo
58 parent_repo
59 child_group/
59 child_group/
60 child_repo
60 child_repo
61 other_group/
61 other_group/
62 other_repo
62 other_repo
63 """
63 """
64 fixture = Fixture()
64 fixture = Fixture()
65
65
66
66
67 parent_group_id = 'int_test_parent_group_%s' % time.time()
67 parent_group_id = 'int_test_parent_group_%s' % time.time()
68 parent_group = fixture.create_repo_group(parent_group_id)
68 parent_group = fixture.create_repo_group(parent_group_id)
69
69
70 other_group_id = 'int_test_other_group_%s' % time.time()
70 other_group_id = 'int_test_other_group_%s' % time.time()
71 other_group = fixture.create_repo_group(other_group_id)
71 other_group = fixture.create_repo_group(other_group_id)
72
72
73 child_group_id = (
73 child_group_id = (
74 parent_group_id + '/' + 'int_test_child_group_%s' % time.time())
74 parent_group_id + '/' + 'int_test_child_group_%s' % time.time())
75 child_group = fixture.create_repo_group(child_group_id)
75 child_group = fixture.create_repo_group(child_group_id)
76
76
77 parent_repo_id = 'int_test_parent_repo_%s' % time.time()
77 parent_repo_id = 'int_test_parent_repo_%s' % time.time()
78 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
78 parent_repo = fixture.create_repo(parent_repo_id, repo_group=parent_group)
79
79
80 child_repo_id = 'int_test_child_repo_%s' % time.time()
80 child_repo_id = 'int_test_child_repo_%s' % time.time()
81 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
81 child_repo = fixture.create_repo(child_repo_id, repo_group=child_group)
82
82
83 other_repo_id = 'int_test_other_repo_%s' % time.time()
83 other_repo_id = 'int_test_other_repo_%s' % time.time()
84 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
84 other_repo = fixture.create_repo(other_repo_id, repo_group=other_group)
85
85
86 root_repo_id = 'int_test_repo_root_%s' % time.time()
86 root_repo_id = 'int_test_repo_root_%s' % time.time()
87 root_repo = fixture.create_repo(root_repo_id)
87 root_repo = fixture.create_repo(root_repo_id)
88
88
89 integrations = {}
89 integrations = {}
90 for name, repo, repo_group, child_repos_only in [
90 for name, repo, repo_group, child_repos_only in [
91 ('global', None, None, None),
91 ('global', None, None, None),
92 ('root_repos', None, None, True),
92 ('root_repos', None, None, True),
93 ('parent_repo', parent_repo, None, None),
93 ('parent_repo', parent_repo, None, None),
94 ('child_repo', child_repo, None, None),
94 ('child_repo', child_repo, None, None),
95 ('other_repo', other_repo, None, None),
95 ('other_repo', other_repo, None, None),
96 ('root_repo', root_repo, None, None),
96 ('root_repo', root_repo, None, None),
97 ('parent_group', None, parent_group, True),
97 ('parent_group', None, parent_group, True),
98 ('parent_group_recursive', None, parent_group, False),
98 ('parent_group_recursive', None, parent_group, False),
99 ('child_group', None, child_group, True),
99 ('child_group', None, child_group, True),
100 ('child_group_recursive', None, child_group, False),
100 ('child_group_recursive', None, child_group, False),
101 ('other_group', None, other_group, True),
101 ('other_group', None, other_group, True),
102 ('other_group_recursive', None, other_group, False),
102 ('other_group_recursive', None, other_group, False),
103 ]:
103 ]:
104 integrations[name] = IntegrationModel().create(
104 integrations[name] = IntegrationModel().create(
105 StubIntegrationType, settings=stub_integration_settings,
105 StubIntegrationType, settings=stub_integration_settings,
106 enabled=True, name='test %s integration' % name,
106 enabled=True, name='test %s integration' % name,
107 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
107 repo=repo, repo_group=repo_group, child_repos_only=child_repos_only)
108
108
109 Session().commit()
109 Session().commit()
110
110
111 def _cleanup():
111 def _cleanup():
112 for integration in integrations.values():
112 for integration in integrations.values():
113 Session.delete(integration)
113 Session.delete(integration)
114
114
115 fixture.destroy_repo(root_repo)
115 fixture.destroy_repo(root_repo)
116 fixture.destroy_repo(child_repo)
116 fixture.destroy_repo(child_repo)
117 fixture.destroy_repo(parent_repo)
117 fixture.destroy_repo(parent_repo)
118 fixture.destroy_repo(other_repo)
118 fixture.destroy_repo(other_repo)
119 fixture.destroy_repo_group(child_group)
119 fixture.destroy_repo_group(child_group)
120 fixture.destroy_repo_group(parent_group)
120 fixture.destroy_repo_group(parent_group)
121 fixture.destroy_repo_group(other_group)
121 fixture.destroy_repo_group(other_group)
122
122
123 request.addfinalizer(_cleanup)
123 request.addfinalizer(_cleanup)
124
124
125 return {
125 return {
126 'integrations': integrations,
126 'integrations': integrations,
127 'repos': {
127 'repos': {
128 'root_repo': root_repo,
128 'root_repo': root_repo,
129 'other_repo': other_repo,
129 'other_repo': other_repo,
130 'parent_repo': parent_repo,
130 'parent_repo': parent_repo,
131 'child_repo': child_repo,
131 'child_repo': child_repo,
132 }
132 }
133 }
133 }
134
134
135
135
136 def test_enabled_integration_repo_scopes(integration_repos):
136 def test_enabled_integration_repo_scopes(integration_repos):
137 integrations = integration_repos['integrations']
137 integrations = integration_repos['integrations']
138 repos = integration_repos['repos']
138 repos = integration_repos['repos']
139
139
140 triggered_integrations = IntegrationModel().get_for_event(
140 triggered_integrations = IntegrationModel().get_for_event(
141 events.RepoEvent(repos['root_repo']))
141 events.RepoEvent(repos['root_repo']))
142
142
143 assert triggered_integrations == [
143 assert triggered_integrations == [
144 integrations['global'],
144 integrations['global'],
145 integrations['root_repos'],
145 integrations['root_repos'],
146 integrations['root_repo'],
146 integrations['root_repo'],
147 ]
147 ]
148
148
149 triggered_integrations = IntegrationModel().get_for_event(
149 triggered_integrations = IntegrationModel().get_for_event(
150 events.RepoEvent(repos['other_repo']))
150 events.RepoEvent(repos['other_repo']))
151
151
152 assert triggered_integrations == [
152 assert triggered_integrations == [
153 integrations['global'],
153 integrations['global'],
154 integrations['other_group'],
154 integrations['other_group'],
155 integrations['other_group_recursive'],
155 integrations['other_group_recursive'],
156 integrations['other_repo'],
156 integrations['other_repo'],
157 ]
157 ]
158
158
159 triggered_integrations = IntegrationModel().get_for_event(
159 triggered_integrations = IntegrationModel().get_for_event(
160 events.RepoEvent(repos['parent_repo']))
160 events.RepoEvent(repos['parent_repo']))
161
161
162 assert triggered_integrations == [
162 assert triggered_integrations == [
163 integrations['global'],
163 integrations['global'],
164 integrations['parent_group'],
164 integrations['parent_group'],
165 integrations['parent_group_recursive'],
165 integrations['parent_group_recursive'],
166 integrations['parent_repo'],
166 integrations['parent_repo'],
167 ]
167 ]
168
168
169 triggered_integrations = IntegrationModel().get_for_event(
169 triggered_integrations = IntegrationModel().get_for_event(
170 events.RepoEvent(repos['child_repo']))
170 events.RepoEvent(repos['child_repo']))
171
171
172 assert triggered_integrations == [
172 assert triggered_integrations == [
173 integrations['global'],
173 integrations['global'],
174 integrations['child_group'],
174 integrations['child_group'],
175 integrations['parent_group_recursive'],
175 integrations['parent_group_recursive'],
176 integrations['child_group_recursive'],
176 integrations['child_group_recursive'],
177 integrations['child_repo'],
177 integrations['child_repo'],
178 ]
178 ]
179
179
180
180
181 def test_disabled_integration_repo_scopes(integration_repos):
181 def test_disabled_integration_repo_scopes(integration_repos):
182 integrations = integration_repos['integrations']
182 integrations = integration_repos['integrations']
183 repos = integration_repos['repos']
183 repos = integration_repos['repos']
184
184
185 for integration in integrations.values():
185 for integration in integrations.values():
186 integration.enabled = False
186 integration.enabled = False
187 Session().commit()
187 Session().commit()
188
188
189 triggered_integrations = IntegrationModel().get_for_event(
189 triggered_integrations = IntegrationModel().get_for_event(
190 events.RepoEvent(repos['root_repo']))
190 events.RepoEvent(repos['root_repo']))
191
191
192 assert triggered_integrations == []
192 assert triggered_integrations == []
193
193
194 triggered_integrations = IntegrationModel().get_for_event(
194 triggered_integrations = IntegrationModel().get_for_event(
195 events.RepoEvent(repos['parent_repo']))
195 events.RepoEvent(repos['parent_repo']))
196
196
197 assert triggered_integrations == []
197 assert triggered_integrations == []
198
198
199 triggered_integrations = IntegrationModel().get_for_event(
199 triggered_integrations = IntegrationModel().get_for_event(
200 events.RepoEvent(repos['child_repo']))
200 events.RepoEvent(repos['child_repo']))
201
201
202 assert triggered_integrations == []
202 assert triggered_integrations == []
203
203
204 triggered_integrations = IntegrationModel().get_for_event(
204 triggered_integrations = IntegrationModel().get_for_event(
205 events.RepoEvent(repos['other_repo']))
205 events.RepoEvent(repos['other_repo']))
206
206
207 assert triggered_integrations == []
207 assert triggered_integrations == []
208
208
209
209
210 def test_enabled_non_repo_integrations(integration_repos):
210 def test_enabled_non_repo_integrations(integration_repos):
211 integrations = integration_repos['integrations']
211 integrations = integration_repos['integrations']
212
212
213 triggered_integrations = IntegrationModel().get_for_event(
213 triggered_integrations = IntegrationModel().get_for_event(
214 events.UserPreCreate({}))
214 events.UserPreCreate({}))
215
215
216 assert triggered_integrations == [integrations['global']]
216 assert triggered_integrations == [integrations['global']]
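A conceptual sketch of the scoping rule these tests exercise; the real logic lives in IntegrationModel.get_for_event, and the `ancestor_groups` input below (the repo's parent groups from nearest to root) is an assumption made for illustration:

    def integration_applies(integration, repo, ancestor_groups):
        # repo-scoped integration: only that repository
        if integration.repo is not None:
            return integration.repo == repo
        # group-scoped integration: direct children only, or the whole subtree
        if integration.repo_group is not None:
            if integration.child_repos_only:
                return ancestor_groups[:1] == [integration.repo_group]
            return integration.repo_group in ancestor_groups
        # no repo/group bound: either root-level repos only, or global
        if integration.child_repos_only:
            return not ancestor_groups
        return True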
@@ -1,66 +1,66 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 from mock import patch
22 from mock import patch
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.db import Session, Integration
26 from rhodecode.integrations.types.slack import SlackIntegrationType
26 from rhodecode.integrations.types.slack import SlackIntegrationType
27
27
28
28
29 @pytest.fixture
29 @pytest.fixture()
30 def slack_settings():
30 def slack_settings():
31 return {
31 return {
32 "service": "mock://slackintegration",
32 "service": "mock://slackintegration",
33 "events": [
33 "events": [
34 "pullrequest-create",
34 "pullrequest-create",
35 "repo-push",
35 "repo-push",
36 ],
36 ],
37 "channel": "#testing",
37 "channel": "#testing",
38 "icon_emoji": ":recycle:",
38 "icon_emoji": ":recycle:",
39 "username": "rhodecode-test"
39 "username": "rhodecode-test"
40 }
40 }
41
41
42
42
43 @pytest.fixture
43 @pytest.fixture()
44 def slack_integration(request, app, slack_settings):
44 def slack_integration(request, app, slack_settings):
45 integration = Integration()
45 integration = Integration()
46 integration.name = 'test slack integration'
46 integration.name = 'test slack integration'
47 integration.enabled = True
47 integration.enabled = True
48 integration.integration_type = SlackIntegrationType.key
48 integration.integration_type = SlackIntegrationType.key
49 integration.settings = slack_settings
49 integration.settings = slack_settings
50 Session().add(integration)
50 Session().add(integration)
51 Session().commit()
51 Session().commit()
52 request.addfinalizer(lambda: Session().delete(integration))
52 request.addfinalizer(lambda: Session().delete(integration))
53 return integration
53 return integration
54
54
55
55
56 def test_slack_push(slack_integration, repo_push_event):
56 def test_slack_push(slack_integration, repo_push_event):
57 with patch('rhodecode.integrations.types.slack.post_text_to_slack') as call:
57 with patch('rhodecode.integrations.types.slack.post_text_to_slack') as call:
58 events.trigger(repo_push_event)
58 events.trigger(repo_push_event)
59 assert 'pushed to' in call.call_args[0][1]
59 assert 'pushed to' in call.call_args[0][1]
60
60
61 slack_integration.settings['events'] = []
61 slack_integration.settings['events'] = []
62 Session().commit()
62 Session().commit()
63
63
64 with patch('rhodecode.integrations.types.slack.post_text_to_slack') as call:
64 with patch('rhodecode.integrations.types.slack.post_text_to_slack') as call:
65 events.trigger(repo_push_event)
65 events.trigger(repo_push_event)
66 assert not call.call_args
66 assert not call.call_args
@@ -1,135 +1,135 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode import events
23 from rhodecode import events
24 from rhodecode.lib.utils2 import AttributeDict
24 from rhodecode.lib.utils2 import AttributeDict
25 from rhodecode.integrations.types.webhook import WebhookDataHandler
25 from rhodecode.integrations.types.webhook import WebhookDataHandler
26
26
27
27
28 @pytest.fixture
28 @pytest.fixture()
29 def base_data():
29 def base_data():
30 return {
30 return {
31 'name': 'event',
31 'name': 'event',
32 'repo': {
32 'repo': {
33 'repo_name': 'foo',
33 'repo_name': 'foo',
34 'repo_type': 'hg',
34 'repo_type': 'hg',
35 'repo_id': '12',
35 'repo_id': '12',
36 'url': 'http://repo.url/foo',
36 'url': 'http://repo.url/foo',
37 'extra_fields': {},
37 'extra_fields': {},
38 },
38 },
39 'actor': {
39 'actor': {
40 'username': 'actor_name',
40 'username': 'actor_name',
41 'user_id': 1
41 'user_id': 1
42 }
42 }
43 }
43 }
44
44
45
45
46 def test_webhook_parse_url_invalid_event():
46 def test_webhook_parse_url_invalid_event():
47 template_url = 'http://server.com/${repo_name}/build'
47 template_url = 'http://server.com/${repo_name}/build'
48 handler = WebhookDataHandler(
48 handler = WebhookDataHandler(
49 template_url, {'example-header': 'header-values'})
49 template_url, {'example-header': 'header-values'})
50 event = events.RepoDeleteEvent('')
50 event = events.RepoDeleteEvent('')
51 with pytest.raises(ValueError) as err:
51 with pytest.raises(ValueError) as err:
52 handler(event, {})
52 handler(event, {})
53
53
54 err = str(err.value)
54 err = str(err.value)
55 assert err.startswith(
55 assert err.startswith(
56 'event type `%s` not in supported list' % event.__class__)
56 'event type `%s` not in supported list' % event.__class__)
57
57
58
58
59 @pytest.mark.parametrize('template,expected_urls', [
59 @pytest.mark.parametrize('template,expected_urls', [
60 ('http://server.com/${repo_name}/build',
60 ('http://server.com/${repo_name}/build',
61 ['http://server.com/foo/build']),
61 ['http://server.com/foo/build']),
62 ('http://server.com/${repo_name}/${repo_type}',
62 ('http://server.com/${repo_name}/${repo_type}',
63 ['http://server.com/foo/hg']),
63 ['http://server.com/foo/hg']),
64 ('http://${server}.com/${repo_name}/${repo_id}',
64 ('http://${server}.com/${repo_name}/${repo_id}',
65 ['http://${server}.com/foo/12']),
65 ['http://${server}.com/foo/12']),
66 ('http://server.com/${branch}/build',
66 ('http://server.com/${branch}/build',
67 ['http://server.com/${branch}/build']),
67 ['http://server.com/${branch}/build']),
68 ])
68 ])
69 def test_webhook_parse_url_for_create_event(base_data, template, expected_urls):
69 def test_webhook_parse_url_for_create_event(base_data, template, expected_urls):
70 headers = {'example-header': 'header-values'}
70 headers = {'example-header': 'header-values'}
71 handler = WebhookDataHandler(template, headers)
71 handler = WebhookDataHandler(template, headers)
72 urls = handler(events.RepoCreateEvent(''), base_data)
72 urls = handler(events.RepoCreateEvent(''), base_data)
73 assert urls == [
73 assert urls == [
74 (url, headers, base_data) for url in expected_urls]
74 (url, headers, base_data) for url in expected_urls]
75
75
76
76
77 @pytest.mark.parametrize('template,expected_urls', [
77 @pytest.mark.parametrize('template,expected_urls', [
78 ('http://server.com/${repo_name}/${pull_request_id}',
78 ('http://server.com/${repo_name}/${pull_request_id}',
79 ['http://server.com/foo/999']),
79 ['http://server.com/foo/999']),
80 ('http://server.com/${repo_name}/${pull_request_url}',
80 ('http://server.com/${repo_name}/${pull_request_url}',
81 ['http://server.com/foo/http%3A//pr-url.com']),
81 ['http://server.com/foo/http%3A//pr-url.com']),
82 ('http://server.com/${repo_name}/${pull_request_url}/?TITLE=${pull_request_title}',
82 ('http://server.com/${repo_name}/${pull_request_url}/?TITLE=${pull_request_title}',
83 ['http://server.com/foo/http%3A//pr-url.com/?TITLE=example-pr-title%20Ticket%20%23123']),
83 ['http://server.com/foo/http%3A//pr-url.com/?TITLE=example-pr-title%20Ticket%20%23123']),
84 ('http://server.com/${repo_name}/?SHADOW_URL=${pull_request_shadow_url}',
84 ('http://server.com/${repo_name}/?SHADOW_URL=${pull_request_shadow_url}',
85 ['http://server.com/foo/?SHADOW_URL=http%3A//pr-url.com/repository']),
85 ['http://server.com/foo/?SHADOW_URL=http%3A//pr-url.com/repository']),
86 ])
86 ])
87 def test_webhook_parse_url_for_pull_request_event(base_data, template, expected_urls):
87 def test_webhook_parse_url_for_pull_request_event(base_data, template, expected_urls):
88
88
89 base_data['pullrequest'] = {
89 base_data['pullrequest'] = {
90 'pull_request_id': 999,
90 'pull_request_id': 999,
91 'url': 'http://pr-url.com',
91 'url': 'http://pr-url.com',
92 'title': 'example-pr-title Ticket #123',
92 'title': 'example-pr-title Ticket #123',
93 'commits_uid': 'abcdefg1234',
93 'commits_uid': 'abcdefg1234',
94 'shadow_url': 'http://pr-url.com/repository'
94 'shadow_url': 'http://pr-url.com/repository'
95 }
95 }
96 headers = {'example-header': 'header-values'}
96 headers = {'example-header': 'header-values'}
97 handler = WebhookDataHandler(template, headers)
97 handler = WebhookDataHandler(template, headers)
98 urls = handler(events.PullRequestCreateEvent(
98 urls = handler(events.PullRequestCreateEvent(
99 AttributeDict({'target_repo': 'foo'})), base_data)
99 AttributeDict({'target_repo': 'foo'})), base_data)
100 assert urls == [
100 assert urls == [
101 (url, headers, base_data) for url in expected_urls]
101 (url, headers, base_data) for url in expected_urls]
102
102
103
103
104 @pytest.mark.parametrize('template,expected_urls', [
104 @pytest.mark.parametrize('template,expected_urls', [
105 ('http://server.com/${branch}/build',
105 ('http://server.com/${branch}/build',
106 ['http://server.com/stable/build',
106 ['http://server.com/stable/build',
107 'http://server.com/dev/build']),
107 'http://server.com/dev/build']),
108 ('http://server.com/${branch}/${commit_id}',
108 ('http://server.com/${branch}/${commit_id}',
109 ['http://server.com/stable/stable-xxx',
109 ['http://server.com/stable/stable-xxx',
110 'http://server.com/stable/stable-yyy',
110 'http://server.com/stable/stable-yyy',
111 'http://server.com/dev/dev-xxx',
111 'http://server.com/dev/dev-xxx',
112 'http://server.com/dev/dev-yyy']),
112 'http://server.com/dev/dev-yyy']),
113 ('http://server.com/${branch_head}',
113 ('http://server.com/${branch_head}',
114 ['http://server.com/stable-yyy',
114 ['http://server.com/stable-yyy',
115 'http://server.com/dev-yyy']),
115 'http://server.com/dev-yyy']),
116 ('http://server.com/${commit_id}',
116 ('http://server.com/${commit_id}',
117 ['http://server.com/stable-xxx',
117 ['http://server.com/stable-xxx',
118 'http://server.com/stable-yyy',
118 'http://server.com/stable-yyy',
119 'http://server.com/dev-xxx',
119 'http://server.com/dev-xxx',
120 'http://server.com/dev-yyy']),
120 'http://server.com/dev-yyy']),
121 ])
121 ])
122 def test_webhook_parse_url_for_push_event(
122 def test_webhook_parse_url_for_push_event(
123 baseapp, repo_push_event, base_data, template, expected_urls):
123 baseapp, repo_push_event, base_data, template, expected_urls):
124 base_data['push'] = {
124 base_data['push'] = {
125 'branches': [{'name': 'stable'}, {'name': 'dev'}],
125 'branches': [{'name': 'stable'}, {'name': 'dev'}],
126 'commits': [{'branch': 'stable', 'raw_id': 'stable-xxx'},
126 'commits': [{'branch': 'stable', 'raw_id': 'stable-xxx'},
127 {'branch': 'stable', 'raw_id': 'stable-yyy'},
127 {'branch': 'stable', 'raw_id': 'stable-yyy'},
128 {'branch': 'dev', 'raw_id': 'dev-xxx'},
128 {'branch': 'dev', 'raw_id': 'dev-xxx'},
129 {'branch': 'dev', 'raw_id': 'dev-yyy'}]
129 {'branch': 'dev', 'raw_id': 'dev-yyy'}]
130 }
130 }
131 headers = {'example-header': 'header-values'}
131 headers = {'example-header': 'header-values'}
132 handler = WebhookDataHandler(template, headers)
132 handler = WebhookDataHandler(template, headers)
133 urls = handler(repo_push_event, base_data)
133 urls = handler(repo_push_event, base_data)
134 assert urls == [
134 assert urls == [
135 (url, headers, base_data) for url in expected_urls]
135 (url, headers, base_data) for url in expected_urls]
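A rough sketch of the substitution behaviour these parametrized cases describe (not the WebhookDataHandler implementation): ${...} variables are filled in from the event data, unknown variables are left untouched, and per-commit variables fan out into one URL per commit:

    from string import Template

    def render_push_urls(template_url, repo_name, commit_ids):
        urls = []
        for commit_id in commit_ids:
            url = Template(template_url).safe_substitute(
                repo_name=repo_name, commit_id=commit_id)
            if url not in urls:   # collapse templates that do not use ${commit_id}
                urls.append(url)
        return urls

    # render_push_urls('http://server.com/${repo_name}/${commit_id}',
    #                  'foo', ['stable-xxx', 'stable-yyy'])
    # -> ['http://server.com/foo/stable-xxx', 'http://server.com/foo/stable-yyy']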
@@ -1,189 +1,189 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.lib.auth import _RhodeCodeCryptoBCrypt
24 from rhodecode.lib.auth import _RhodeCodeCryptoBCrypt
25 from rhodecode.authentication.base import RhodeCodeAuthPluginBase
25 from rhodecode.authentication.base import RhodeCodeAuthPluginBase
26 from rhodecode.authentication.plugins.auth_ldap import RhodeCodeAuthPlugin
26 from rhodecode.authentication.plugins.auth_ldap import RhodeCodeAuthPlugin
27 from rhodecode.model import db
27 from rhodecode.model import db
28
28
29
29
30 class RcTestAuthPlugin(RhodeCodeAuthPluginBase):
30 class RcTestAuthPlugin(RhodeCodeAuthPluginBase):
31
31
32 def name(self):
32 def name(self):
33 return u'stub_auth'
33 return u'stub_auth'
34
34
35
35
36 def test_authenticate_returns_from_auth(stub_auth_data):
36 def test_authenticate_returns_from_auth(stub_auth_data):
37 plugin = RcTestAuthPlugin('stub_id')
37 plugin = RcTestAuthPlugin('stub_id')
38 with mock.patch.object(plugin, 'auth') as auth_mock:
38 with mock.patch.object(plugin, 'auth') as auth_mock:
39 auth_mock.return_value = stub_auth_data
39 auth_mock.return_value = stub_auth_data
40 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
40 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
41 assert stub_auth_data == result
41 assert stub_auth_data == result
42
42
43
43
44 def test_authenticate_returns_empty_auth_data():
44 def test_authenticate_returns_empty_auth_data():
45 auth_data = {}
45 auth_data = {}
46 plugin = RcTestAuthPlugin('stub_id')
46 plugin = RcTestAuthPlugin('stub_id')
47 with mock.patch.object(plugin, 'auth') as auth_mock:
47 with mock.patch.object(plugin, 'auth') as auth_mock:
48 auth_mock.return_value = auth_data
48 auth_mock.return_value = auth_data
49 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
49 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
50 assert auth_data == result
50 assert auth_data == result
51
51
52
52
53 def test_authenticate_skips_hash_migration_if_mismatch(stub_auth_data):
53 def test_authenticate_skips_hash_migration_if_mismatch(stub_auth_data):
54 stub_auth_data['_hash_migrate'] = 'new-hash'
54 stub_auth_data['_hash_migrate'] = 'new-hash'
55 plugin = RcTestAuthPlugin('stub_id')
55 plugin = RcTestAuthPlugin('stub_id')
56 with mock.patch.object(plugin, 'auth') as auth_mock:
56 with mock.patch.object(plugin, 'auth') as auth_mock:
57 auth_mock.return_value = stub_auth_data
57 auth_mock.return_value = stub_auth_data
58 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
58 result = plugin._authenticate(mock.Mock(), 'test', 'password', {})
59
59
60 user = db.User.get_by_username(stub_auth_data['username'])
60 user = db.User.get_by_username(stub_auth_data['username'])
61 assert user.password != 'new-hash'
61 assert user.password != 'new-hash'
62 assert result == stub_auth_data
62 assert result == stub_auth_data
63
63
64
64
65 def test_authenticate_migrates_to_new_hash(stub_auth_data):
65 def test_authenticate_migrates_to_new_hash(stub_auth_data):
66 new_password = b'new-password'
66 new_password = b'new-password'
67 new_hash = _RhodeCodeCryptoBCrypt().hash_create(new_password)
67 new_hash = _RhodeCodeCryptoBCrypt().hash_create(new_password)
68 stub_auth_data['_hash_migrate'] = new_hash
68 stub_auth_data['_hash_migrate'] = new_hash
69 plugin = RcTestAuthPlugin('stub_id')
69 plugin = RcTestAuthPlugin('stub_id')
70 with mock.patch.object(plugin, 'auth') as auth_mock:
70 with mock.patch.object(plugin, 'auth') as auth_mock:
71 auth_mock.return_value = stub_auth_data
71 auth_mock.return_value = stub_auth_data
72 result = plugin._authenticate(
72 result = plugin._authenticate(
73 mock.Mock(), stub_auth_data['username'], new_password, {})
73 mock.Mock(), stub_auth_data['username'], new_password, {})
74
74
75 user = db.User.get_by_username(stub_auth_data['username'])
75 user = db.User.get_by_username(stub_auth_data['username'])
76 assert user.password == new_hash
76 assert user.password == new_hash
77 assert result == stub_auth_data
77 assert result == stub_auth_data
78
78
79
79
80 @pytest.fixture
80 @pytest.fixture()
81 def stub_auth_data(user_util):
81 def stub_auth_data(user_util):
82 user = user_util.create_user()
82 user = user_util.create_user()
83 data = {
83 data = {
84 'username': user.username,
84 'username': user.username,
85 'password': 'password',
85 'password': 'password',
86 'email': 'test@example.org',
86 'email': 'test@example.org',
87 'firstname': 'John',
87 'firstname': 'John',
88 'lastname': 'Smith',
88 'lastname': 'Smith',
89 'groups': [],
89 'groups': [],
90 'active': True,
90 'active': True,
91 'admin': False,
91 'admin': False,
92 'extern_name': 'test',
92 'extern_name': 'test',
93 'extern_type': 'ldap',
93 'extern_type': 'ldap',
94 'active_from_extern': True
94 'active_from_extern': True
95 }
95 }
96 return data
96 return data
97
97
98
98
99 class TestRhodeCodeAuthPlugin(object):
99 class TestRhodeCodeAuthPlugin(object):
100 def setup_method(self, method):
100 def setup_method(self, method):
101 self.finalizers = []
101 self.finalizers = []
102 self.user = mock.Mock()
102 self.user = mock.Mock()
103 self.user.username = 'test'
103 self.user.username = 'test'
104 self.user.password = 'old-password'
104 self.user.password = 'old-password'
105 self.fake_auth = {
105 self.fake_auth = {
106 'username': 'test',
106 'username': 'test',
107 'password': 'test',
107 'password': 'test',
108 'email': 'test@example.org',
108 'email': 'test@example.org',
109 'firstname': 'John',
109 'firstname': 'John',
110 'lastname': 'Smith',
110 'lastname': 'Smith',
111 'groups': [],
111 'groups': [],
112 'active': True,
112 'active': True,
113 'admin': False,
113 'admin': False,
114 'extern_name': 'test',
114 'extern_name': 'test',
115 'extern_type': 'ldap',
115 'extern_type': 'ldap',
116 'active_from_extern': True
116 'active_from_extern': True
117 }
117 }
118
118
119 def teardown_method(self, method):
119 def teardown_method(self, method):
120 if self.finalizers:
120 if self.finalizers:
121 for finalizer in self.finalizers:
121 for finalizer in self.finalizers:
122 finalizer()
122 finalizer()
123 self.finalizers = []
123 self.finalizers = []
124
124
125 def test_fake_password_is_created_for_the_new_user(self):
125 def test_fake_password_is_created_for_the_new_user(self):
126 self._patch()
126 self._patch()
127 auth_plugin = RhodeCodeAuthPlugin('stub_id')
127 auth_plugin = RhodeCodeAuthPlugin('stub_id')
128 auth_plugin._authenticate(self.user, 'test', 'test', [])
128 auth_plugin._authenticate(self.user, 'test', 'test', [])
129 self.password_generator_mock.assert_called_once_with(length=16)
129 self.password_generator_mock.assert_called_once_with(length=16)
130 create_user_kwargs = self.create_user_mock.call_args[1]
130 create_user_kwargs = self.create_user_mock.call_args[1]
131 assert create_user_kwargs['password'] == 'new-password'
131 assert create_user_kwargs['password'] == 'new-password'
132
132
133 def test_fake_password_is_not_created_for_the_existing_user(self):
133 def test_fake_password_is_not_created_for_the_existing_user(self):
134 self._patch()
134 self._patch()
135 self.get_user_mock.return_value = self.user
135 self.get_user_mock.return_value = self.user
136 auth_plugin = RhodeCodeAuthPlugin('stub_id')
136 auth_plugin = RhodeCodeAuthPlugin('stub_id')
137 auth_plugin._authenticate(self.user, 'test', 'test', [])
137 auth_plugin._authenticate(self.user, 'test', 'test', [])
138 assert self.password_generator_mock.called is False
138 assert self.password_generator_mock.called is False
139 create_user_kwargs = self.create_user_mock.call_args[1]
139 create_user_kwargs = self.create_user_mock.call_args[1]
140 assert create_user_kwargs['password'] == self.user.password
140 assert create_user_kwargs['password'] == self.user.password
141
141
142 def _patch(self):
142 def _patch(self):
143 get_user_patch = mock.patch('rhodecode.model.db.User.get_by_username')
143 get_user_patch = mock.patch('rhodecode.model.db.User.get_by_username')
144 self.get_user_mock = get_user_patch.start()
144 self.get_user_mock = get_user_patch.start()
145 self.get_user_mock.return_value = None
145 self.get_user_mock.return_value = None
146 self.finalizers.append(get_user_patch.stop)
146 self.finalizers.append(get_user_patch.stop)
147
147
148 create_user_patch = mock.patch(
148 create_user_patch = mock.patch(
149 'rhodecode.model.user.UserModel.create_or_update')
149 'rhodecode.model.user.UserModel.create_or_update')
150 self.create_user_mock = create_user_patch.start()
150 self.create_user_mock = create_user_patch.start()
151 self.create_user_mock.return_value = None
151 self.create_user_mock.return_value = None
152 self.finalizers.append(create_user_patch.stop)
152 self.finalizers.append(create_user_patch.stop)
153
153
154 auth_patch = mock.patch.object(RhodeCodeAuthPlugin, 'auth')
154 auth_patch = mock.patch.object(RhodeCodeAuthPlugin, 'auth')
155 self.auth_mock = auth_patch.start()
155 self.auth_mock = auth_patch.start()
156 self.auth_mock.return_value = self.fake_auth
156 self.auth_mock.return_value = self.fake_auth
157 self.finalizers.append(auth_patch.stop)
157 self.finalizers.append(auth_patch.stop)
158
158
159 password_generator_patch = mock.patch(
159 password_generator_patch = mock.patch(
160 'rhodecode.lib.auth.PasswordGenerator.gen_password')
160 'rhodecode.lib.auth.PasswordGenerator.gen_password')
161 self.password_generator_mock = password_generator_patch.start()
161 self.password_generator_mock = password_generator_patch.start()
162 self.password_generator_mock.return_value = 'new-password'
162 self.password_generator_mock.return_value = 'new-password'
163 self.finalizers.append(password_generator_patch.stop)
163 self.finalizers.append(password_generator_patch.stop)
164
164
165
165
166 def test_missing_ldap():
166 def test_missing_ldap():
167 from rhodecode.model.validators import Missing
167 from rhodecode.model.validators import Missing
168
168
169 try:
169 try:
170 import ldap_not_existing
170 import ldap_not_existing
171 except ImportError:
171 except ImportError:
172 # means that python-ldap is not installed
172 # means that python-ldap is not installed
173 ldap_not_existing = Missing
173 ldap_not_existing = Missing
174
174
175 # missing is singleton
175 # missing is singleton
176 assert ldap_not_existing == Missing
176 assert ldap_not_existing == Missing
177
177
178
178
179 def test_import_ldap():
179 def test_import_ldap():
180 from rhodecode.model.validators import Missing
180 from rhodecode.model.validators import Missing
181
181
182 try:
182 try:
183 import ldap
183 import ldap
184 except ImportError:
184 except ImportError:
185 # means that python-ldap is not installed
185 # means that python-ldap is not installed
186 ldap = Missing
186 ldap = Missing
187
187
188 # missing is singleton
188 # missing is singleton
189 assert False is (ldap == Missing)
189 assert False is (ldap == Missing)
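For context on the pytest.fixture vs pytest.fixture() edits visible in this file: both spellings register the same function-scoped fixture, and the explicit call form is only a style choice that stays consistent with fixtures that pass arguments such as scope or params. A minimal illustrative sketch, not part of the changeset:

import pytest

@pytest.fixture          # bare decorator form
def value_a():
    return 1

@pytest.fixture()        # explicit call form; equivalent for a plain fixture
def value_b():
    return 2

@pytest.fixture(scope='module')  # once arguments appear, the call form is required
def value_c():
    return 3

def test_uses_all_three(value_a, value_b, value_c):
    assert (value_a, value_b, value_c) == (1, 2, 3)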
@@ -1,473 +1,473 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.tests.utils import CustomTestApp
27 from rhodecode.tests.utils import CustomTestApp
28
28
29 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.caching_query import FromCache
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.lib.middleware.utils import scm_app_http
34 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.db import User, _hash_key
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.lib.middleware import mock_scm_app
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._is_shadow_repo_dir = True
49 self._is_shadow_repo_dir = True
50 self._name = HG_REPO
50 self._name = HG_REPO
51 self.set_repo_names(None)
51 self.set_repo_names(None)
52
52
53 @property
53 @property
54 def is_shadow_repo_dir(self):
54 def is_shadow_repo_dir(self):
55 return self._is_shadow_repo_dir
55 return self._is_shadow_repo_dir
56
56
57 def _get_repository_name(self, environ):
57 def _get_repository_name(self, environ):
58 return self._name
58 return self._name
59
59
60 def _get_action(self, environ):
60 def _get_action(self, environ):
61 return self._action
61 return self._action
62
62
63 def _create_wsgi_app(self, repo_path, repo_name, config):
63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 def fake_app(environ, start_response):
64 def fake_app(environ, start_response):
65 headers = [
65 headers = [
66 ('Http-Accept', 'application/mercurial')
66 ('Http-Accept', 'application/mercurial')
67 ]
67 ]
68 start_response('200 OK', headers)
68 start_response('200 OK', headers)
69 return self.stub_response_body
69 return self.stub_response_body
70 return fake_app
70 return fake_app
71
71
72 def _create_config(self, extras, repo_name, scheme='http'):
72 def _create_config(self, extras, repo_name, scheme='http'):
73 return None
73 return None
74
74
75
75
76 @pytest.fixture
76 @pytest.fixture()
77 def vcscontroller(baseapp, config_stub, request_stub):
77 def vcscontroller(baseapp, config_stub, request_stub):
78 config_stub.testing_securitypolicy()
78 config_stub.testing_securitypolicy()
79 config_stub.include('rhodecode.authentication')
79 config_stub.include('rhodecode.authentication')
80 config_stub.include('rhodecode.authentication.plugins.auth_rhodecode')
80 config_stub.include('rhodecode.authentication.plugins.auth_rhodecode')
81 config_stub.include('rhodecode.authentication.plugins.auth_token')
81 config_stub.include('rhodecode.authentication.plugins.auth_token')
82
82
83 controller = StubVCSController(
83 controller = StubVCSController(
84 baseapp.config.get_settings(), request_stub.registry)
84 baseapp.config.get_settings(), request_stub.registry)
85 app = HttpsFixup(controller, baseapp.config.get_settings())
85 app = HttpsFixup(controller, baseapp.config.get_settings())
86 app = CustomTestApp(app)
86 app = CustomTestApp(app)
87
87
88 _remove_default_user_from_query_cache()
88 _remove_default_user_from_query_cache()
89
89
90 # Sanity checks that things are set up correctly
90 # Sanity checks that things are set up correctly
91 app.get('/' + HG_REPO, status=200)
91 app.get('/' + HG_REPO, status=200)
92
92
93 app.controller = controller
93 app.controller = controller
94 return app
94 return app
95
95
96
96
97 def _remove_default_user_from_query_cache():
97 def _remove_default_user_from_query_cache():
98 user = User.get_default_user(cache=True)
98 user = User.get_default_user(cache=True)
99 query = Session().query(User).filter(User.username == user.username)
99 query = Session().query(User).filter(User.username == user.username)
100 query = query.options(
100 query = query.options(
101 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
101 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
102 query.invalidate()
102 query.invalidate()
103 Session().expire(user)
103 Session().expire(user)
104
104
105
105
106 def test_handles_exceptions_during_permissions_checks(
106 def test_handles_exceptions_during_permissions_checks(
107 vcscontroller, disable_anonymous_user):
107 vcscontroller, disable_anonymous_user):
108 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
108 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
109 auth_password = base64.encodestring(user_and_pass).strip()
109 auth_password = base64.encodestring(user_and_pass).strip()
110 extra_environ = {
110 extra_environ = {
111 'AUTH_TYPE': 'Basic',
111 'AUTH_TYPE': 'Basic',
112 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
112 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
113 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
113 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
114 }
114 }
115
115
116 # Verify that things are hooked up correctly
116 # Verify that things are hooked up correctly
117 vcscontroller.get('/', status=200, extra_environ=extra_environ)
117 vcscontroller.get('/', status=200, extra_environ=extra_environ)
118
118
119 # Simulate trouble during permission checks
119 # Simulate trouble during permission checks
120 with mock.patch('rhodecode.model.db.User.get_by_username',
120 with mock.patch('rhodecode.model.db.User.get_by_username',
121 side_effect=Exception) as get_user:
121 side_effect=Exception) as get_user:
122 # Verify that a correct 500 is returned and check that the expected
122 # Verify that a correct 500 is returned and check that the expected
123 # code path was hit.
123 # code path was hit.
124 vcscontroller.get('/', status=500, extra_environ=extra_environ)
124 vcscontroller.get('/', status=500, extra_environ=extra_environ)
125 assert get_user.called
125 assert get_user.called
126
126
127
127
128 def test_returns_forbidden_if_no_anonymous_access(
128 def test_returns_forbidden_if_no_anonymous_access(
129 vcscontroller, disable_anonymous_user):
129 vcscontroller, disable_anonymous_user):
130 vcscontroller.get('/', status=401)
130 vcscontroller.get('/', status=401)
131
131
132
132
133 class StubFailVCSController(simplevcs.SimpleVCS):
133 class StubFailVCSController(simplevcs.SimpleVCS):
134 def _handle_request(self, environ, start_response):
134 def _handle_request(self, environ, start_response):
135 raise Exception("BOOM")
135 raise Exception("BOOM")
136
136
137
137
138 @pytest.fixture(scope='module')
138 @pytest.fixture(scope='module')
139 def fail_controller(baseapp):
139 def fail_controller(baseapp):
140 controller = StubFailVCSController(
140 controller = StubFailVCSController(
141 baseapp.config.get_settings(), baseapp.config)
141 baseapp.config.get_settings(), baseapp.config)
142 controller = HttpsFixup(controller, baseapp.config.get_settings())
142 controller = HttpsFixup(controller, baseapp.config.get_settings())
143 controller = CustomTestApp(controller)
143 controller = CustomTestApp(controller)
144 return controller
144 return controller
145
145
146
146
147 def test_handles_exceptions_as_internal_server_error(fail_controller):
147 def test_handles_exceptions_as_internal_server_error(fail_controller):
148 fail_controller.get('/', status=500)
148 fail_controller.get('/', status=500)
149
149
150
150
151 def test_provides_traceback_for_appenlight(fail_controller):
151 def test_provides_traceback_for_appenlight(fail_controller):
152 response = fail_controller.get(
152 response = fail_controller.get(
153 '/', status=500, extra_environ={'appenlight.client': 'fake'})
153 '/', status=500, extra_environ={'appenlight.client': 'fake'})
154 assert 'appenlight.__traceback' in response.request.environ
154 assert 'appenlight.__traceback' in response.request.environ
155
155
156
156
157 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
157 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
158 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
158 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
159 assert controller.scm_app is scm_app_http
159 assert controller.scm_app is scm_app_http
160
160
161
161
162 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
162 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
163 config = baseapp.config.get_settings().copy()
163 config = baseapp.config.get_settings().copy()
164 config['vcs.scm_app_implementation'] = (
164 config['vcs.scm_app_implementation'] = (
165 'rhodecode.tests.lib.middleware.mock_scm_app')
165 'rhodecode.tests.lib.middleware.mock_scm_app')
166 controller = StubVCSController(config, request_stub.registry)
166 controller = StubVCSController(config, request_stub.registry)
167 assert controller.scm_app is mock_scm_app
167 assert controller.scm_app is mock_scm_app
168
168
169
169
170 @pytest.mark.parametrize('query_string, expected', [
170 @pytest.mark.parametrize('query_string, expected', [
171 ('cmd=stub_command', True),
171 ('cmd=stub_command', True),
172 ('cmd=listkeys', False),
172 ('cmd=listkeys', False),
173 ])
173 ])
174 def test_should_check_locking(query_string, expected):
174 def test_should_check_locking(query_string, expected):
175 result = simplevcs._should_check_locking(query_string)
175 result = simplevcs._should_check_locking(query_string)
176 assert result == expected
176 assert result == expected
177
177
178
178
179 class TestShadowRepoRegularExpression(object):
179 class TestShadowRepoRegularExpression(object):
180 pr_segment = 'pull-request'
180 pr_segment = 'pull-request'
181 shadow_segment = 'repository'
181 shadow_segment = 'repository'
182
182
183 @pytest.mark.parametrize('url, expected', [
183 @pytest.mark.parametrize('url, expected', [
184 # repo with/without groups
184 # repo with/without groups
185 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
185 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
186 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
186 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
187 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
188 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
188 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
189
189
190 # pull request ID
190 # pull request ID
191 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
191 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
192 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
192 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
193 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
193 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
194 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
194 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
195
195
196 # unicode
196 # unicode
197 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
198 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
198 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
199
199
200 # trailing/leading slash
200 # trailing/leading slash
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
202 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
203 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
203 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
204
204
205 # misc
205 # misc
206 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
206 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
207 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
207 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
208 ])
208 ])
209 def test_shadow_repo_regular_expression(self, url, expected):
209 def test_shadow_repo_regular_expression(self, url, expected):
210 from rhodecode.lib.middleware.simplevcs import SimpleVCS
210 from rhodecode.lib.middleware.simplevcs import SimpleVCS
211 url = url.format(
211 url = url.format(
212 pr_segment=self.pr_segment,
212 pr_segment=self.pr_segment,
213 shadow_segment=self.shadow_segment)
213 shadow_segment=self.shadow_segment)
214 match_obj = SimpleVCS.shadow_repo_re.match(url)
214 match_obj = SimpleVCS.shadow_repo_re.match(url)
215 assert (match_obj is not None) == expected
215 assert (match_obj is not None) == expected
216
216
217
217
218 @pytest.mark.backends('git', 'hg')
218 @pytest.mark.backends('git', 'hg')
219 class TestShadowRepoExposure(object):
219 class TestShadowRepoExposure(object):
220
220
221 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
221 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
222 self, baseapp, request_stub):
222 self, baseapp, request_stub):
223 """
223 """
224 Check that a pull action to a shadow repo is propagated to the
224 Check that a pull action to a shadow repo is propagated to the
225 underlying wsgi app.
225 underlying wsgi app.
226 """
226 """
227 controller = StubVCSController(
227 controller = StubVCSController(
228 baseapp.config.get_settings(), request_stub.registry)
228 baseapp.config.get_settings(), request_stub.registry)
229 controller._check_ssl = mock.Mock()
229 controller._check_ssl = mock.Mock()
230 controller.is_shadow_repo = True
230 controller.is_shadow_repo = True
231 controller._action = 'pull'
231 controller._action = 'pull'
232 controller._is_shadow_repo_dir = True
232 controller._is_shadow_repo_dir = True
233 controller.stub_response_body = 'dummy body value'
233 controller.stub_response_body = 'dummy body value'
234 controller._get_default_cache_ttl = mock.Mock(
234 controller._get_default_cache_ttl = mock.Mock(
235 return_value=(False, 0))
235 return_value=(False, 0))
236
236
237 environ_stub = {
237 environ_stub = {
238 'HTTP_HOST': 'test.example.com',
238 'HTTP_HOST': 'test.example.com',
239 'HTTP_ACCEPT': 'application/mercurial',
239 'HTTP_ACCEPT': 'application/mercurial',
240 'REQUEST_METHOD': 'GET',
240 'REQUEST_METHOD': 'GET',
241 'wsgi.url_scheme': 'http',
241 'wsgi.url_scheme': 'http',
242 }
242 }
243
243
244 response = controller(environ_stub, mock.Mock())
244 response = controller(environ_stub, mock.Mock())
245 response_body = ''.join(response)
245 response_body = ''.join(response)
246
246
247 # Assert that we got the response from the wsgi app.
247 # Assert that we got the response from the wsgi app.
248 assert response_body == controller.stub_response_body
248 assert response_body == controller.stub_response_body
249
249
250 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
250 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
251 """
251 """
252 Check that a pull action against a missing shadow repo directory
252 Check that a pull action against a missing shadow repo directory
253 results in a 404 Not Found response from the underlying wsgi app.
253 results in a 404 Not Found response from the underlying wsgi app.
254 """
254 """
255 controller = StubVCSController(
255 controller = StubVCSController(
256 baseapp.config.get_settings(), request_stub.registry)
256 baseapp.config.get_settings(), request_stub.registry)
257 controller._check_ssl = mock.Mock()
257 controller._check_ssl = mock.Mock()
258 controller.is_shadow_repo = True
258 controller.is_shadow_repo = True
259 controller._action = 'pull'
259 controller._action = 'pull'
260 controller._is_shadow_repo_dir = False
260 controller._is_shadow_repo_dir = False
261 controller.stub_response_body = 'dummy body value'
261 controller.stub_response_body = 'dummy body value'
262 environ_stub = {
262 environ_stub = {
263 'HTTP_HOST': 'test.example.com',
263 'HTTP_HOST': 'test.example.com',
264 'HTTP_ACCEPT': 'application/mercurial',
264 'HTTP_ACCEPT': 'application/mercurial',
265 'REQUEST_METHOD': 'GET',
265 'REQUEST_METHOD': 'GET',
266 'wsgi.url_scheme': 'http',
266 'wsgi.url_scheme': 'http',
267 }
267 }
268
268
269 response = controller(environ_stub, mock.Mock())
269 response = controller(environ_stub, mock.Mock())
270 response_body = ''.join(response)
270 response_body = ''.join(response)
271
271
272 # Assert that the missing shadow repo results in a 404 from the wsgi app.
272 # Assert that the missing shadow repo results in a 404 from the wsgi app.
273 assert '404 Not Found' in response_body
273 assert '404 Not Found' in response_body
274
274
275 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
275 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
276 """
276 """
277 Check that a push action to a shadow repo is aborted.
277 Check that a push action to a shadow repo is aborted.
278 """
278 """
279 controller = StubVCSController(
279 controller = StubVCSController(
280 baseapp.config.get_settings(), request_stub.registry)
280 baseapp.config.get_settings(), request_stub.registry)
281 controller._check_ssl = mock.Mock()
281 controller._check_ssl = mock.Mock()
282 controller.is_shadow_repo = True
282 controller.is_shadow_repo = True
283 controller._action = 'push'
283 controller._action = 'push'
284 controller.stub_response_body = 'dummy body value'
284 controller.stub_response_body = 'dummy body value'
285 environ_stub = {
285 environ_stub = {
286 'HTTP_HOST': 'test.example.com',
286 'HTTP_HOST': 'test.example.com',
287 'HTTP_ACCEPT': 'application/mercurial',
287 'HTTP_ACCEPT': 'application/mercurial',
288 'REQUEST_METHOD': 'GET',
288 'REQUEST_METHOD': 'GET',
289 'wsgi.url_scheme': 'http',
289 'wsgi.url_scheme': 'http',
290 }
290 }
291
291
292 response = controller(environ_stub, mock.Mock())
292 response = controller(environ_stub, mock.Mock())
293 response_body = ''.join(response)
293 response_body = ''.join(response)
294
294
295 assert response_body != controller.stub_response_body
295 assert response_body != controller.stub_response_body
296 # Assert that a 406 error is returned.
296 # Assert that a 406 error is returned.
297 assert '406 Not Acceptable' in response_body
297 assert '406 Not Acceptable' in response_body
298
298
299 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
299 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
300 """
300 """
301 Check that the set_repo_names method sets all names to the one returned
301 Check that the set_repo_names method sets all names to the one returned
302 by the _get_repository_name method on a request to a non shadow repo.
302 by the _get_repository_name method on a request to a non shadow repo.
303 """
303 """
304 environ_stub = {}
304 environ_stub = {}
305 controller = StubVCSController(
305 controller = StubVCSController(
306 baseapp.config.get_settings(), request_stub.registry)
306 baseapp.config.get_settings(), request_stub.registry)
307 controller._name = 'RepoGroup/MyRepo'
307 controller._name = 'RepoGroup/MyRepo'
308 controller.set_repo_names(environ_stub)
308 controller.set_repo_names(environ_stub)
309 assert not controller.is_shadow_repo
309 assert not controller.is_shadow_repo
310 assert (controller.url_repo_name ==
310 assert (controller.url_repo_name ==
311 controller.acl_repo_name ==
311 controller.acl_repo_name ==
312 controller.vcs_repo_name ==
312 controller.vcs_repo_name ==
313 controller._get_repository_name(environ_stub))
313 controller._get_repository_name(environ_stub))
314
314
315 def test_set_repo_names_with_shadow(
315 def test_set_repo_names_with_shadow(
316 self, baseapp, pr_util, config_stub, request_stub):
316 self, baseapp, pr_util, config_stub, request_stub):
317 """
317 """
318 Check that the set_repo_names method sets correct names on a request
318 Check that the set_repo_names method sets correct names on a request
319 to a shadow repo.
319 to a shadow repo.
320 """
320 """
321 from rhodecode.model.pull_request import PullRequestModel
321 from rhodecode.model.pull_request import PullRequestModel
322
322
323 pull_request = pr_util.create_pull_request()
323 pull_request = pr_util.create_pull_request()
324 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
324 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
325 target=pull_request.target_repo.repo_name,
325 target=pull_request.target_repo.repo_name,
326 pr_id=pull_request.pull_request_id,
326 pr_id=pull_request.pull_request_id,
327 pr_segment=TestShadowRepoRegularExpression.pr_segment,
327 pr_segment=TestShadowRepoRegularExpression.pr_segment,
328 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
328 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
329 controller = StubVCSController(
329 controller = StubVCSController(
330 baseapp.config.get_settings(), request_stub.registry)
330 baseapp.config.get_settings(), request_stub.registry)
331 controller._name = shadow_url
331 controller._name = shadow_url
332 controller.set_repo_names({})
332 controller.set_repo_names({})
333
333
334 # Get file system path to shadow repo for assertions.
334 # Get file system path to shadow repo for assertions.
335 workspace_id = PullRequestModel()._workspace_id(pull_request)
335 workspace_id = PullRequestModel()._workspace_id(pull_request)
336 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
336 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
337
337
338 assert controller.vcs_repo_name == vcs_repo_name
338 assert controller.vcs_repo_name == vcs_repo_name
339 assert controller.url_repo_name == shadow_url
339 assert controller.url_repo_name == shadow_url
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 assert controller.is_shadow_repo
341 assert controller.is_shadow_repo
342
342
343 def test_set_repo_names_with_shadow_but_missing_pr(
343 def test_set_repo_names_with_shadow_but_missing_pr(
344 self, baseapp, pr_util, config_stub, request_stub):
344 self, baseapp, pr_util, config_stub, request_stub):
345 """
345 """
346 Checks that the set_repo_names method enforces matching target repos
346 Checks that the set_repo_names method enforces matching target repos
347 and pull request IDs.
347 and pull request IDs.
348 """
348 """
349 pull_request = pr_util.create_pull_request()
349 pull_request = pr_util.create_pull_request()
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 target=pull_request.target_repo.repo_name,
351 target=pull_request.target_repo.repo_name,
352 pr_id=999999999,
352 pr_id=999999999,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 controller = StubVCSController(
355 controller = StubVCSController(
356 baseapp.config.get_settings(), request_stub.registry)
356 baseapp.config.get_settings(), request_stub.registry)
357 controller._name = shadow_url
357 controller._name = shadow_url
358 controller.set_repo_names({})
358 controller.set_repo_names({})
359
359
360 assert not controller.is_shadow_repo
360 assert not controller.is_shadow_repo
361 assert (controller.url_repo_name ==
361 assert (controller.url_repo_name ==
362 controller.acl_repo_name ==
362 controller.acl_repo_name ==
363 controller.vcs_repo_name)
363 controller.vcs_repo_name)
364
364
365
365
366 @pytest.mark.usefixtures('baseapp')
366 @pytest.mark.usefixtures('baseapp')
367 class TestGenerateVcsResponse(object):
367 class TestGenerateVcsResponse(object):
368
368
369 def test_ensures_that_start_response_is_called_early_enough(self):
369 def test_ensures_that_start_response_is_called_early_enough(self):
370 self.call_controller_with_response_body(iter(['a', 'b']))
370 self.call_controller_with_response_body(iter(['a', 'b']))
371 assert self.start_response.called
371 assert self.start_response.called
372
372
373 def test_invalidates_cache_after_body_is_consumed(self):
373 def test_invalidates_cache_after_body_is_consumed(self):
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 assert not self.was_cache_invalidated()
375 assert not self.was_cache_invalidated()
376 # Consume the result
376 # Consume the result
377 list(result)
377 list(result)
378 assert self.was_cache_invalidated()
378 assert self.was_cache_invalidated()
379
379
380 def test_raises_unknown_exceptions(self):
380 def test_raises_unknown_exceptions(self):
381 result = self.call_controller_with_response_body(
381 result = self.call_controller_with_response_body(
382 self.raise_result_iter(vcs_kind='unknown'))
382 self.raise_result_iter(vcs_kind='unknown'))
383 with pytest.raises(Exception):
383 with pytest.raises(Exception):
384 list(result)
384 list(result)
385
385
386 def test_prepare_callback_daemon_is_called(self):
386 def test_prepare_callback_daemon_is_called(self):
387 def side_effect(extras, environ, action, txn_id=None):
387 def side_effect(extras, environ, action, txn_id=None):
388 return DummyHooksCallbackDaemon(), extras
388 return DummyHooksCallbackDaemon(), extras
389
389
390 prepare_patcher = mock.patch.object(
390 prepare_patcher = mock.patch.object(
391 StubVCSController, '_prepare_callback_daemon')
391 StubVCSController, '_prepare_callback_daemon')
392 with prepare_patcher as prepare_mock:
392 with prepare_patcher as prepare_mock:
393 prepare_mock.side_effect = side_effect
393 prepare_mock.side_effect = side_effect
394 self.call_controller_with_response_body(iter(['a', 'b']))
394 self.call_controller_with_response_body(iter(['a', 'b']))
395 assert prepare_mock.called
395 assert prepare_mock.called
396 assert prepare_mock.call_count == 1
396 assert prepare_mock.call_count == 1
397
397
398 def call_controller_with_response_body(self, response_body):
398 def call_controller_with_response_body(self, response_body):
399 settings = {
399 settings = {
400 'base_path': 'fake_base_path',
400 'base_path': 'fake_base_path',
401 'vcs.hooks.protocol': 'http',
401 'vcs.hooks.protocol': 'http',
402 'vcs.hooks.direct_calls': False,
402 'vcs.hooks.direct_calls': False,
403 }
403 }
404 registry = AttributeDict()
404 registry = AttributeDict()
405 controller = StubVCSController(settings, registry)
405 controller = StubVCSController(settings, registry)
406 controller._invalidate_cache = mock.Mock()
406 controller._invalidate_cache = mock.Mock()
407 controller.stub_response_body = response_body
407 controller.stub_response_body = response_body
408 self.start_response = mock.Mock()
408 self.start_response = mock.Mock()
409 result = controller._generate_vcs_response(
409 result = controller._generate_vcs_response(
410 environ={}, start_response=self.start_response,
410 environ={}, start_response=self.start_response,
411 repo_path='fake_repo_path',
411 repo_path='fake_repo_path',
412 extras={}, action='push')
412 extras={}, action='push')
413 self.controller = controller
413 self.controller = controller
414 return result
414 return result
415
415
416 def raise_result_iter(self, vcs_kind='repo_locked'):
416 def raise_result_iter(self, vcs_kind='repo_locked'):
417 """
417 """
418 Simulates a vcs-raised exception of the given vcs_kind.
418 Simulates a vcs-raised exception of the given vcs_kind.
419 """
419 """
420 raise self.vcs_exception(vcs_kind=vcs_kind)
420 raise self.vcs_exception(vcs_kind=vcs_kind)
421 yield "never_reached"
421 yield "never_reached"
422
422
423 def vcs_exception(self, vcs_kind='repo_locked'):
423 def vcs_exception(self, vcs_kind='repo_locked'):
424 locked_exception = Exception('TEST_MESSAGE')
424 locked_exception = Exception('TEST_MESSAGE')
425 locked_exception._vcs_kind = vcs_kind
425 locked_exception._vcs_kind = vcs_kind
426 return locked_exception
426 return locked_exception
427
427
428 def was_cache_invalidated(self):
428 def was_cache_invalidated(self):
429 return self.controller._invalidate_cache.called
429 return self.controller._invalidate_cache.called
430
430
431
431
432 class TestInitializeGenerator(object):
432 class TestInitializeGenerator(object):
433
433
434 def test_drains_first_element(self):
434 def test_drains_first_element(self):
435 gen = self.factory(['__init__', 1, 2])
435 gen = self.factory(['__init__', 1, 2])
436 result = list(gen)
436 result = list(gen)
437 assert result == [1, 2]
437 assert result == [1, 2]
438
438
439 @pytest.mark.parametrize('values', [
439 @pytest.mark.parametrize('values', [
440 [],
440 [],
441 [1, 2],
441 [1, 2],
442 ])
442 ])
443 def test_raises_value_error(self, values):
443 def test_raises_value_error(self, values):
444 with pytest.raises(ValueError):
444 with pytest.raises(ValueError):
445 self.factory(values)
445 self.factory(values)
446
446
447 @simplevcs.initialize_generator
447 @simplevcs.initialize_generator
448 def factory(self, iterable):
448 def factory(self, iterable):
449 for elem in iterable:
449 for elem in iterable:
450 yield elem
450 yield elem
451
451
452
452
453 class TestPrepareHooksDaemon(object):
453 class TestPrepareHooksDaemon(object):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
455 expected_extras = {'extra1': 'value1'}
455 expected_extras = {'extra1': 'value1'}
456 daemon = DummyHooksCallbackDaemon()
456 daemon = DummyHooksCallbackDaemon()
457
457
458 controller = StubVCSController(app_settings, request_stub.registry)
458 controller = StubVCSController(app_settings, request_stub.registry)
459 prepare_patcher = mock.patch.object(
459 prepare_patcher = mock.patch.object(
460 simplevcs, 'prepare_callback_daemon',
460 simplevcs, 'prepare_callback_daemon',
461 return_value=(daemon, expected_extras))
461 return_value=(daemon, expected_extras))
462 with prepare_patcher as prepare_mock:
462 with prepare_patcher as prepare_mock:
463 callback_daemon, extras = controller._prepare_callback_daemon(
463 callback_daemon, extras = controller._prepare_callback_daemon(
464 expected_extras.copy(), {}, 'push')
464 expected_extras.copy(), {}, 'push')
465 prepare_mock.assert_called_once_with(
465 prepare_mock.assert_called_once_with(
466 expected_extras,
466 expected_extras,
467 protocol=app_settings['vcs.hooks.protocol'],
467 protocol=app_settings['vcs.hooks.protocol'],
468 host=app_settings['vcs.hooks.host'],
468 host=app_settings['vcs.hooks.host'],
469 txn_id=None,
469 txn_id=None,
470 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
470 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
471
471
472 assert callback_daemon == daemon
472 assert callback_daemon == daemon
473 assert extras == expected_extras
473 assert extras == expected_extras
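The TestInitializeGenerator cases above pin down a small contract for the initialize_generator decorator: the wrapped generator must yield an '__init__' marker first, that marker is drained before the caller receives any data, and a missing or different first element raises ValueError. The following is a hypothetical sketch of a decorator satisfying that contract, written for illustration only; it is not the actual rhodecode.lib.middleware.simplevcs implementation:

import functools

def initialize_generator_sketch(func):
    """Drain a leading '__init__' marker from the wrapped generator."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        gen = func(*args, **kwargs)
        try:
            first = next(gen)
        except StopIteration:
            raise ValueError("generator yielded no '__init__' marker")
        if first != '__init__':
            raise ValueError("first yielded value must be '__init__'")
        return gen
    return wrapper

Applied to the factory generator above, list(factory(['__init__', 1, 2])) would yield [1, 2], matching test_drains_first_element, while an empty iterable or one without the marker would raise ValueError as test_raises_value_error expects.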
@@ -1,136 +1,136 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Checking the chunked data transfer via HTTP
22 Checking the chunked data transfer via HTTP
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import subprocess32
27 import subprocess32
28
28
29 import pytest
29 import pytest
30 import requests
30 import requests
31
31
32 from rhodecode.lib.middleware.utils import scm_app_http
32 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.tests.utils import wait_for_url
33 from rhodecode.tests.utils import wait_for_url
34
34
35
35
36 def test_does_chunked_end_to_end_transfer(scm_app):
36 def test_does_chunked_end_to_end_transfer(scm_app):
37 response = requests.post(scm_app, data='', stream=True)
37 response = requests.post(scm_app, data='', stream=True)
38 assert response.headers['Transfer-Encoding'] == 'chunked'
38 assert response.headers['Transfer-Encoding'] == 'chunked'
39 times = [time.time() for chunk in response.raw.read_chunked()]
39 times = [time.time() for chunk in response.raw.read_chunked()]
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
40 assert times[1] - times[0] > 0.1, "Chunks arrived at the same time"
41
41
42
42
43 @pytest.fixture
43 @pytest.fixture()
44 def echo_app_chunking(request, available_port_factory):
44 def echo_app_chunking(request, available_port_factory):
45 """
45 """
46 Run the EchoApp via Waitress in a subprocess.
46 Run the EchoApp via Waitress in a subprocess.
47
47
48 Return the URL endpoint to reach the app.
48 Return the URL endpoint to reach the app.
49 """
49 """
50 port = available_port_factory()
50 port = available_port_factory()
51 command = (
51 command = (
52 'waitress-serve --send-bytes 1 --port {port} --call '
52 'waitress-serve --send-bytes 1 --port {port} --call '
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
53 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
54 ':create_echo_app')
54 ':create_echo_app')
55 command = command.format(port=port)
55 command = command.format(port=port)
56 proc = subprocess32.Popen(command.split(' '), bufsize=0)
56 proc = subprocess32.Popen(command.split(' '), bufsize=0)
57 echo_app_url = 'http://localhost:' + str(port)
57 echo_app_url = 'http://localhost:' + str(port)
58
58
59 @request.addfinalizer
59 @request.addfinalizer
60 def stop_echo_app():
60 def stop_echo_app():
61 proc.kill()
61 proc.kill()
62
62
63 return echo_app_url
63 return echo_app_url
64
64
65
65
66 @pytest.fixture
66 @pytest.fixture()
67 def scm_app(request, available_port_factory, echo_app_chunking):
67 def scm_app(request, available_port_factory, echo_app_chunking):
68 """
68 """
69 Run the scm_app in Waitress.
69 Run the scm_app in Waitress.
70
70
71 Returns the URL endpoint where this app can be reached.
71 Returns the URL endpoint where this app can be reached.
72 """
72 """
73 port = available_port_factory()
73 port = available_port_factory()
74 command = (
74 command = (
75 'waitress-serve --send-bytes 1 --port {port} --call '
75 'waitress-serve --send-bytes 1 --port {port} --call '
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
76 'rhodecode.tests.lib.middleware.utils.test_scm_app_http_chunking'
77 ':create_scm_app')
77 ':create_scm_app')
78 command = command.format(port=port)
78 command = command.format(port=port)
79 env = os.environ.copy()
79 env = os.environ.copy()
80 env["RC_ECHO_URL"] = echo_app_chunking
80 env["RC_ECHO_URL"] = echo_app_chunking
81 proc = subprocess32.Popen(command.split(' '), bufsize=0, env=env)
81 proc = subprocess32.Popen(command.split(' '), bufsize=0, env=env)
82 scm_app_url = 'http://localhost:' + str(port)
82 scm_app_url = 'http://localhost:' + str(port)
83 wait_for_url(scm_app_url)
83 wait_for_url(scm_app_url)
84
84
85 @request.addfinalizer
85 @request.addfinalizer
86 def stop_scm_app():
86 def stop_scm_app():
87 proc.kill()
87 proc.kill()
88
88
89 return scm_app_url
89 return scm_app_url
90
90
91
91
92 class EchoApp(object):
92 class EchoApp(object):
93 """
93 """
94 Stub WSGI application which returns a chunked response to every request.
94 Stub WSGI application which returns a chunked response to every request.
95 """
95 """
96
96
97 def __init__(self, repo_path, repo_name, config):
97 def __init__(self, repo_path, repo_name, config):
98 self._repo_path = repo_path
98 self._repo_path = repo_path
99
99
100 def __call__(self, environ, start_response):
100 def __call__(self, environ, start_response):
101 environ['wsgi.input'].read()
101 environ['wsgi.input'].read()
102 status = '200 OK'
102 status = '200 OK'
103 headers = []
103 headers = []
104 start_response(status, headers)
104 start_response(status, headers)
105 return result_generator()
105 return result_generator()
106
106
107
107
108 def result_generator():
108 def result_generator():
109 """
109 """
110 Simulate chunked results.
110 Simulate chunked results.
111
111
112 The intended usage is to simulate a chunked response as we would get it
112 The intended usage is to simulate a chunked response as we would get it
113 out of a vcs operation during a call to "hg clone".
113 out of a vcs operation during a call to "hg clone".
114 """
114 """
115 yield 'waiting 2 seconds'
115 yield 'waiting 2 seconds'
116 # Wait long enough so that the first chunk can go out
116 # Wait long enough so that the first chunk can go out
117 time.sleep(2)
117 time.sleep(2)
118 yield 'final chunk'
118 yield 'final chunk'
119 # Another small wait, otherwise they go together
119 # Another small wait, otherwise they go together
120 time.sleep(0.1)
120 time.sleep(0.1)
121
121
122
122
123 def create_echo_app():
123 def create_echo_app():
124 """
124 """
125 Create EchoApp filled with stub data.
125 Create EchoApp filled with stub data.
126 """
126 """
127 return EchoApp('stub_path', 'repo_name', {})
127 return EchoApp('stub_path', 'repo_name', {})
128
128
129
129
130 def create_scm_app():
130 def create_scm_app():
131 """
131 """
132 Create a scm_app hooked up to speak to EchoApp.
132 Create a scm_app hooked up to speak to EchoApp.
133 """
133 """
134 echo_app_url = os.environ["RC_ECHO_URL"]
134 echo_app_url = os.environ["RC_ECHO_URL"]
135 return scm_app_http.VcsHttpProxy(
135 return scm_app_http.VcsHttpProxy(
136 echo_app_url, 'stub_path', 'stub_name', None)
136 echo_app_url, 'stub_path', 'stub_name', None)
@@ -1,101 +1,101 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests checking the crypto backends which can be used by lib/auth.
22 Tests checking the crypto backends which can be used by lib/auth.
23 """
23 """
24 import collections
24 import collections
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib import auth
28 from rhodecode.lib import auth
29
29
30
30
31 # Utility functions to get or check passwords
31 # Utility functions to get or check passwords
32
32
33 def test_get_crypt_password_accepts_unicode(password):
33 def test_get_crypt_password_accepts_unicode(password):
34 result = auth.get_crypt_password(password.value)
34 result = auth.get_crypt_password(password.value)
35 assert result == password.hashed
35 assert result == password.hashed
36
36
37
37
38 def test_check_password_accepts_unicode(password):
38 def test_check_password_accepts_unicode(password):
39 result = auth.check_password(password.value, password.hashed)
39 result = auth.check_password(password.value, password.hashed)
40 assert result
40 assert result
41
41
42
42
43 # API contracts from _RhodeCodeCryptoBase
43 # API contracts from _RhodeCodeCryptoBase
44
44
45 def test_constructor_takes_no_arguments(crypto_backend_class):
45 def test_constructor_takes_no_arguments(crypto_backend_class):
46 instance = crypto_backend_class()
46 instance = crypto_backend_class()
47 assert instance
47 assert instance
48
48
49
49
50 def test_hash_create_returns_bytes(crypto_backend, password):
50 def test_hash_create_returns_bytes(crypto_backend, password):
51 hashed = crypto_backend.hash_create(password.encoded)
51 hashed = crypto_backend.hash_create(password.encoded)
52 assert isinstance(hashed, str)
52 assert isinstance(hashed, str)
53
53
54
54
55 def test_hash_create_changes_the_value(crypto_backend, password):
55 def test_hash_create_changes_the_value(crypto_backend, password):
56 hashed = crypto_backend.hash_create(password.encoded)
56 hashed = crypto_backend.hash_create(password.encoded)
57 assert hashed != password.encoded
57 assert hashed != password.encoded
58
58
59
59
60 def test_hash_create_enforces_bytes(crypto_backend, password):
60 def test_hash_create_enforces_bytes(crypto_backend, password):
61 with pytest.raises(TypeError):
61 with pytest.raises(TypeError):
62 crypto_backend.hash_create(password.value)
62 crypto_backend.hash_create(password.value)
63
63
64
64
65 def test_hash_check(crypto_backend, password):
65 def test_hash_check(crypto_backend, password):
66 not_matching = 'stub-hash'
66 not_matching = 'stub-hash'
67 with pytest.raises(TypeError):
67 with pytest.raises(TypeError):
68 crypto_backend.hash_check(password.value, not_matching)
68 crypto_backend.hash_check(password.value, not_matching)
69
69
70
70
71 def test_hash_check_with_update_enforces_bytes(crypto_backend, password):
71 def test_hash_check_with_update_enforces_bytes(crypto_backend, password):
72 not_matching = 'stub-hash'
72 not_matching = 'stub-hash'
73 with pytest.raises(TypeError):
73 with pytest.raises(TypeError):
74 crypto_backend.hash_check_with_upgrade(password.value, not_matching)
74 crypto_backend.hash_check_with_upgrade(password.value, not_matching)
75
75
76
76
77 @pytest.fixture(params=[
77 @pytest.fixture(params=[
78 auth._RhodeCodeCryptoTest,
78 auth._RhodeCodeCryptoTest,
79 auth._RhodeCodeCryptoBCrypt,
79 auth._RhodeCodeCryptoBCrypt,
80 auth._RhodeCodeCryptoSha256,
80 auth._RhodeCodeCryptoSha256,
81 ])
81 ])
82 def crypto_backend_class(request):
82 def crypto_backend_class(request):
83 """
83 """
84 Parameterizes per crypto backend class.
84 Parameterizes per crypto backend class.
85 """
85 """
86 return request.param
86 return request.param
87
87
88
88
89 @pytest.fixture
89 @pytest.fixture()
90 def crypto_backend(crypto_backend_class):
90 def crypto_backend(crypto_backend_class):
91 return crypto_backend_class()
91 return crypto_backend_class()
92
92
93
93
94 @pytest.fixture
94 @pytest.fixture()
95 def password():
95 def password():
96 encoding = 'utf-8'
96 encoding = 'utf-8'
97 value = u'value'
97 value = u'value'
98 value_encoded = value.encode(encoding)
98 value_encoded = value.encode(encoding)
99 value_hashed = auth.crypto_backend().hash_create(value_encoded)
99 value_hashed = auth.crypto_backend().hash_create(value_encoded)
100 return collections.namedtuple('Password', 'value, encoded, hashed')(
100 return collections.namedtuple('Password', 'value, encoded, hashed')(
101 value, value_encoded, value_hashed)
101 value, value_encoded, value_hashed)
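crypto_backend_class above is a parametrized fixture: every test that depends on it, directly or through crypto_backend, runs once per listed backend, and the function-scoped password fixture is rebuilt for each of those runs. A small standalone sketch of the mechanism, for illustration only and with made-up backend names:

import pytest

@pytest.fixture(params=['bcrypt', 'sha256'])
def backend_name(request):
    # request.param holds the parameter selected for this test invocation
    return request.param

@pytest.fixture()
def greeting(backend_name):
    # re-created for every parametrized run of backend_name
    return 'hash with %s' % backend_name

def test_runs_once_per_backend(greeting, backend_name):
    assert backend_name in greeting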
@@ -1,92 +1,92 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.lib.db_manage import DbManage
24 from rhodecode.lib.db_manage import DbManage
25 from rhodecode.model import db
25 from rhodecode.model import db
26
26
27
27
28 @pytest.fixture
28 @pytest.fixture()
29 def db_manage(baseapp):
29 def db_manage(baseapp):
30 db_manage = DbManage(
30 db_manage = DbManage(
31 log_sql=True, dbconf='fake', root='fake', tests=False,
31 log_sql=True, dbconf='fake', root='fake', tests=False,
32 cli_args={}, SESSION=db.Session())
32 cli_args={}, SESSION=db.Session())
33 return db_manage
33 return db_manage
34
34
35
35
36 @pytest.fixture(autouse=True)
36 @pytest.fixture(autouse=True)
37 def session_rollback(baseapp, request):
37 def session_rollback(baseapp, request):
38 """
38 """
39 Rollback the database session after the test run.
39 Rollback the database session after the test run.
40
40
41 Intended usage is for tests which mess with the database but don't
41 Intended usage is for tests which mess with the database but don't
42 commit. In this case a rollback after the test run will leave the database
42 commit. In this case a rollback after the test run will leave the database
43 in a clean state.
43 in a clean state.
44
44
45 This is still a workaround until we find a way to isolate the tests better
45 This is still a workaround until we find a way to isolate the tests better
46 from each other.
46 from each other.
47 """
47 """
48 @request.addfinalizer
48 @request.addfinalizer
49 def cleanup():
49 def cleanup():
50 db.Session().rollback()
50 db.Session().rollback()
51
51
52
52
53 def test_create_admin_and_prompt_uses_getpass(db_manage):
53 def test_create_admin_and_prompt_uses_getpass(db_manage):
54 db_manage.cli_args = {
54 db_manage.cli_args = {
55 'username': 'test',
55 'username': 'test',
56 'email': 'test@example.com'}
56 'email': 'test@example.com'}
57 with mock.patch('getpass.getpass', return_value='password') as getpass:
57 with mock.patch('getpass.getpass', return_value='password') as getpass:
58 db_manage.create_admin_and_prompt()
58 db_manage.create_admin_and_prompt()
59 assert getpass.called
59 assert getpass.called
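`test_create_admin_and_prompt_uses_getpass` patches `getpass.getpass` so the interactive prompt never blocks the test run and the call can be asserted. A standalone sketch of that technique is shown below; `ask_password` is a hypothetical helper standing in for `DbManage.create_admin_and_prompt`.

```python
# Standalone sketch of the getpass patching technique; ask_password is a
# hypothetical helper, not part of DbManage.
import getpass

import mock  # on Python 3, `from unittest import mock` is equivalent


def ask_password():
    return getpass.getpass('Password: ')


def test_ask_password_uses_getpass():
    with mock.patch('getpass.getpass', return_value='secret') as fake_getpass:
        assert ask_password() == 'secret'
    assert fake_getpass.called
```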
60
60
61
61
62 def test_create_admin_and_prompt_sets_the_api_key(db_manage):
62 def test_create_admin_and_prompt_sets_the_api_key(db_manage):
63 db_manage.cli_args = {
63 db_manage.cli_args = {
64 'username': 'test',
64 'username': 'test',
65 'password': 'testpassword',
65 'password': 'testpassword',
66 'email': 'test@example.com',
66 'email': 'test@example.com',
67 'api_key': 'testkey'}
67 'api_key': 'testkey'}
68 with mock.patch.object(db_manage, 'create_user') as create_user:
68 with mock.patch.object(db_manage, 'create_user') as create_user:
69 db_manage.create_admin_and_prompt()
69 db_manage.create_admin_and_prompt()
70
70
71 assert create_user.call_args[1]['api_key'] == 'testkey'
71 assert create_user.call_args[1]['api_key'] == 'testkey'
72
72
73
73
74 @pytest.mark.parametrize('add_keys', [True, False])
74 @pytest.mark.parametrize('add_keys', [True, False])
75 def test_create_user_sets_the_api_key(db_manage, add_keys):
75 def test_create_user_sets_the_api_key(db_manage, add_keys):
76 username = 'test_add_keys_{}'.format(add_keys)
76 username = 'test_add_keys_{}'.format(add_keys)
77 db_manage.create_user(
77 db_manage.create_user(
78 username, 'testpassword', 'test@example.com',
78 username, 'testpassword', 'test@example.com',
79 api_key=add_keys)
79 api_key=add_keys)
80
80
81 user = db.User.get_by_username(username)
81 user = db.User.get_by_username(username)
82 if add_keys:
82 if add_keys:
83 assert 2 == len(user.auth_tokens)
83 assert 2 == len(user.auth_tokens)
84 else:
84 else:
85 # only feed token
85 # only feed token
86 assert 1 == len(user.auth_tokens)
86 assert 1 == len(user.auth_tokens)
87
87
88
88
89 def test_create_user_without_api_key(db_manage):
89 def test_create_user_without_api_key(db_manage):
90 db_manage.create_user('test', 'testpassword', 'test@example.com')
90 db_manage.create_user('test', 'testpassword', 'test@example.com')
91 user = db.User.get_by_username('test')
91 user = db.User.get_by_username('test')
92 assert user.api_key is None
92 assert user.api_key is None
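The autouse `session_rollback` fixture above uses `request.addfinalizer` so that every test in the module ends with a rollback of uncommitted work. A compact sketch of the same pattern against a plain SQLAlchemy session follows; the in-memory `Session` factory is an assumption of this example, not RhodeCode's `db.Session`.

```python
# Sketch of the autouse rollback pattern; the in-memory Session factory is
# an assumption of this example, not RhodeCode's db.Session.
import pytest
from sqlalchemy import create_engine, text
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine))


@pytest.fixture(autouse=True)
def session_rollback(request):
    """Roll back uncommitted changes once each test has finished."""
    @request.addfinalizer
    def cleanup():
        Session().rollback()


def test_leaves_no_dirty_state():
    session = Session()
    session.execute(text('SELECT 1'))  # any uncommitted work is rolled back
```

A `yield`-based fixture would achieve the same effect; the finalizer form simply mirrors the code above.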
@@ -1,825 +1,825 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import textwrap
21 import textwrap
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.codeblocks import DiffSet
26 from rhodecode.lib.codeblocks import DiffSet
27 from rhodecode.lib.diffs import (
27 from rhodecode.lib.diffs import (
28 DiffProcessor,
28 DiffProcessor,
29 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
29 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
30 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
30 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
31 from rhodecode.lib.utils2 import AttributeDict
31 from rhodecode.lib.utils2 import AttributeDict
32 from rhodecode.lib.vcs.backends.git import GitCommit
32 from rhodecode.lib.vcs.backends.git import GitCommit
33 from rhodecode.tests.fixture import Fixture, no_newline_id_generator
33 from rhodecode.tests.fixture import Fixture, no_newline_id_generator
34 from rhodecode.lib.vcs.backends.git.repository import GitDiff
34 from rhodecode.lib.vcs.backends.git.repository import GitDiff
35 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
35 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
36 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
36 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
37
37
38 fixture = Fixture()
38 fixture = Fixture()
39
39
40
40
41 def test_diffprocessor_as_html_with_comments():
41 def test_diffprocessor_as_html_with_comments():
42 raw_diff = textwrap.dedent('''
42 raw_diff = textwrap.dedent('''
43 diff --git a/setup.py b/setup.py
43 diff --git a/setup.py b/setup.py
44 index 5b36422..cfd698e 100755
44 index 5b36422..cfd698e 100755
45 --- a/setup.py
45 --- a/setup.py
46 +++ b/setup.py
46 +++ b/setup.py
47 @@ -2,7 +2,7 @@
47 @@ -2,7 +2,7 @@
48 #!/usr/bin/python
48 #!/usr/bin/python
49 # Setup file for X
49 # Setup file for X
50 # Copyright (C) No one
50 # Copyright (C) No one
51 -
51 -
52 +x
52 +x
53 try:
53 try:
54 from setuptools import setup, Extension
54 from setuptools import setup, Extension
55 except ImportError:
55 except ImportError:
56 ''')
56 ''')
57 diff = GitDiff(raw_diff)
57 diff = GitDiff(raw_diff)
58 processor = DiffProcessor(diff)
58 processor = DiffProcessor(diff)
59 processor.prepare()
59 processor.prepare()
60
60
61 # Note that the cell with the context in line 5 (in the html) has the
61 # Note that the cell with the context in line 5 (in the html) has the
62 # no-comment class, which will prevent the add comment icon from being displayed.
62 # no-comment class, which will prevent the add comment icon from being displayed.
63 expected_html = textwrap.dedent('''
63 expected_html = textwrap.dedent('''
64 <table class="code-difftable">
64 <table class="code-difftable">
65 <tr class="line context">
65 <tr class="line context">
66 <td class="add-comment-line"><span class="add-comment-content"></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
66 <td class="add-comment-line"><span class="add-comment-content"></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
67 <td class="lineno old">...</td>
67 <td class="lineno old">...</td>
68 <td class="lineno new">...</td>
68 <td class="lineno new">...</td>
69 <td class="code no-comment">
69 <td class="code no-comment">
70 <pre>@@ -2,7 +2,7 @@
70 <pre>@@ -2,7 +2,7 @@
71 </pre>
71 </pre>
72 </td>
72 </td>
73 </tr>
73 </tr>
74 <tr class="line unmod">
74 <tr class="line unmod">
75 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
75 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
76 <td id="setuppy_o2" class="lineno old"><a href="#setuppy_o2" class="tooltip"
76 <td id="setuppy_o2" class="lineno old"><a href="#setuppy_o2" class="tooltip"
77 title="Click to select line">2</a></td>
77 title="Click to select line">2</a></td>
78 <td id="setuppy_n2" class="lineno new"><a href="#setuppy_n2" class="tooltip"
78 <td id="setuppy_n2" class="lineno new"><a href="#setuppy_n2" class="tooltip"
79 title="Click to select line">2</a></td>
79 title="Click to select line">2</a></td>
80 <td class="code">
80 <td class="code">
81 <pre>#!/usr/bin/python
81 <pre>#!/usr/bin/python
82 </pre>
82 </pre>
83 </td>
83 </td>
84 </tr>
84 </tr>
85 <tr class="line unmod">
85 <tr class="line unmod">
86 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
86 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
87 <td id="setuppy_o3" class="lineno old"><a href="#setuppy_o3" class="tooltip"
87 <td id="setuppy_o3" class="lineno old"><a href="#setuppy_o3" class="tooltip"
88 title="Click to select line">3</a></td>
88 title="Click to select line">3</a></td>
89 <td id="setuppy_n3" class="lineno new"><a href="#setuppy_n3" class="tooltip"
89 <td id="setuppy_n3" class="lineno new"><a href="#setuppy_n3" class="tooltip"
90 title="Click to select line">3</a></td>
90 title="Click to select line">3</a></td>
91 <td class="code">
91 <td class="code">
92 <pre># Setup file for X
92 <pre># Setup file for X
93 </pre>
93 </pre>
94 </td>
94 </td>
95 </tr>
95 </tr>
96 <tr class="line unmod">
96 <tr class="line unmod">
97 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
97 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
98 <td id="setuppy_o4" class="lineno old"><a href="#setuppy_o4" class="tooltip"
98 <td id="setuppy_o4" class="lineno old"><a href="#setuppy_o4" class="tooltip"
99 title="Click to select line">4</a></td>
99 title="Click to select line">4</a></td>
100 <td id="setuppy_n4" class="lineno new"><a href="#setuppy_n4" class="tooltip"
100 <td id="setuppy_n4" class="lineno new"><a href="#setuppy_n4" class="tooltip"
101 title="Click to select line">4</a></td>
101 title="Click to select line">4</a></td>
102 <td class="code">
102 <td class="code">
103 <pre># Copyright (C) No one
103 <pre># Copyright (C) No one
104 </pre>
104 </pre>
105 </td>
105 </td>
106 </tr>
106 </tr>
107 <tr class="line del">
107 <tr class="line del">
108 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
108 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
109 <td id="setuppy_o5" class="lineno old"><a href="#setuppy_o5" class="tooltip"
109 <td id="setuppy_o5" class="lineno old"><a href="#setuppy_o5" class="tooltip"
110 title="Click to select line">5</a></td>
110 title="Click to select line">5</a></td>
111 <td class="lineno new"><a href="#setuppy_n" class="tooltip"
111 <td class="lineno new"><a href="#setuppy_n" class="tooltip"
112 title="Click to select line"></a></td>
112 title="Click to select line"></a></td>
113 <td class="code">
113 <td class="code">
114 <pre>
114 <pre>
115 </pre>
115 </pre>
116 </td>
116 </td>
117 </tr>
117 </tr>
118 <tr class="line add">
118 <tr class="line add">
119 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
119 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
120 <td class="lineno old"><a href="#setuppy_o" class="tooltip"
120 <td class="lineno old"><a href="#setuppy_o" class="tooltip"
121 title="Click to select line"></a></td>
121 title="Click to select line"></a></td>
122 <td id="setuppy_n5" class="lineno new"><a href="#setuppy_n5" class="tooltip"
122 <td id="setuppy_n5" class="lineno new"><a href="#setuppy_n5" class="tooltip"
123 title="Click to select line">5</a></td>
123 title="Click to select line">5</a></td>
124 <td class="code">
124 <td class="code">
125 <pre><ins>x</ins>
125 <pre><ins>x</ins>
126 </pre>
126 </pre>
127 </td>
127 </td>
128 </tr>
128 </tr>
129 <tr class="line unmod">
129 <tr class="line unmod">
130 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
130 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
131 <td id="setuppy_o6" class="lineno old"><a href="#setuppy_o6" class="tooltip"
131 <td id="setuppy_o6" class="lineno old"><a href="#setuppy_o6" class="tooltip"
132 title="Click to select line">6</a></td>
132 title="Click to select line">6</a></td>
133 <td id="setuppy_n6" class="lineno new"><a href="#setuppy_n6" class="tooltip"
133 <td id="setuppy_n6" class="lineno new"><a href="#setuppy_n6" class="tooltip"
134 title="Click to select line">6</a></td>
134 title="Click to select line">6</a></td>
135 <td class="code">
135 <td class="code">
136 <pre>try:
136 <pre>try:
137 </pre>
137 </pre>
138 </td>
138 </td>
139 </tr>
139 </tr>
140 <tr class="line unmod">
140 <tr class="line unmod">
141 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
141 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
142 <td id="setuppy_o7" class="lineno old"><a href="#setuppy_o7" class="tooltip"
142 <td id="setuppy_o7" class="lineno old"><a href="#setuppy_o7" class="tooltip"
143 title="Click to select line">7</a></td>
143 title="Click to select line">7</a></td>
144 <td id="setuppy_n7" class="lineno new"><a href="#setuppy_n7" class="tooltip"
144 <td id="setuppy_n7" class="lineno new"><a href="#setuppy_n7" class="tooltip"
145 title="Click to select line">7</a></td>
145 title="Click to select line">7</a></td>
146 <td class="code">
146 <td class="code">
147 <pre> from setuptools import setup, Extension
147 <pre> from setuptools import setup, Extension
148 </pre>
148 </pre>
149 </td>
149 </td>
150 </tr>
150 </tr>
151 <tr class="line unmod">
151 <tr class="line unmod">
152 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
152 <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>
153 <td id="setuppy_o8" class="lineno old"><a href="#setuppy_o8" class="tooltip"
153 <td id="setuppy_o8" class="lineno old"><a href="#setuppy_o8" class="tooltip"
154 title="Click to select line">8</a></td>
154 title="Click to select line">8</a></td>
155 <td id="setuppy_n8" class="lineno new"><a href="#setuppy_n8" class="tooltip"
155 <td id="setuppy_n8" class="lineno new"><a href="#setuppy_n8" class="tooltip"
156 title="Click to select line">8</a></td>
156 title="Click to select line">8</a></td>
157 <td class="code">
157 <td class="code">
158 <pre>except ImportError:
158 <pre>except ImportError:
159 </pre>
159 </pre>
160 </td>
160 </td>
161 </tr>
161 </tr>
162 </table>
162 </table>
163 ''').strip()
163 ''').strip()
164 html = processor.as_html(enable_comments=True).replace('\t', ' ')
164 html = processor.as_html(enable_comments=True).replace('\t', ' ')
165
165
166 assert html == expected_html
166 assert html == expected_html
167
167
168
168
169 class TestMixedFilenameEncodings(object):
169 class TestMixedFilenameEncodings(object):
170
170
171 @pytest.fixture(scope="class")
171 @pytest.fixture(scope="class")
172 def raw_diff(self):
172 def raw_diff(self):
173 return fixture.load_resource(
173 return fixture.load_resource(
174 'hg_diff_mixed_filename_encodings.diff')
174 'hg_diff_mixed_filename_encodings.diff')
175
175
176 @pytest.fixture
176 @pytest.fixture()
177 def processor(self, raw_diff):
177 def processor(self, raw_diff):
178 diff = MercurialDiff(raw_diff)
178 diff = MercurialDiff(raw_diff)
179 processor = DiffProcessor(diff)
179 processor = DiffProcessor(diff)
180 return processor
180 return processor
181
181
182 def test_filenames_are_decoded_to_unicode(self, processor):
182 def test_filenames_are_decoded_to_unicode(self, processor):
183 diff_data = processor.prepare()
183 diff_data = processor.prepare()
184 filenames = [item['filename'] for item in diff_data]
184 filenames = [item['filename'] for item in diff_data]
185 assert filenames == [
185 assert filenames == [
186 u'späcial-utf8.txt', u'sp�cial-cp1252.txt', u'sp�cial-latin1.txt']
186 u'späcial-utf8.txt', u'sp�cial-cp1252.txt', u'sp�cial-latin1.txt']
187
187
188 def test_raw_diff_is_decoded_to_unicode(self, processor):
188 def test_raw_diff_is_decoded_to_unicode(self, processor):
189 diff_data = processor.prepare()
189 diff_data = processor.prepare()
190 raw_diffs = [item['raw_diff'] for item in diff_data]
190 raw_diffs = [item['raw_diff'] for item in diff_data]
191 new_file_message = u'\nnew file mode 100644\n'
191 new_file_message = u'\nnew file mode 100644\n'
192 expected_raw_diffs = [
192 expected_raw_diffs = [
193 u' a/späcial-utf8.txt b/späcial-utf8.txt' + new_file_message,
193 u' a/späcial-utf8.txt b/späcial-utf8.txt' + new_file_message,
194 u' a/sp�cial-cp1252.txt b/sp�cial-cp1252.txt' + new_file_message,
194 u' a/sp�cial-cp1252.txt b/sp�cial-cp1252.txt' + new_file_message,
195 u' a/sp�cial-latin1.txt b/sp�cial-latin1.txt' + new_file_message]
195 u' a/sp�cial-latin1.txt b/sp�cial-latin1.txt' + new_file_message]
196 assert raw_diffs == expected_raw_diffs
196 assert raw_diffs == expected_raw_diffs
197
197
198 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
198 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
199 assert processor.as_raw() == raw_diff
199 assert processor.as_raw() == raw_diff
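`TestMixedFilenameEncodings` pairs a class-scoped `raw_diff` fixture (the .diff resource is loaded once for the whole class) with a function-scoped `processor` (a fresh `DiffProcessor` per test). The toy sketch below illustrates that scoping split, with a counter standing in for the expensive resource load.

```python
# Toy illustration of class- vs function-scoped fixtures; the counter
# stands in for an expensive fixture.load_resource call.
import itertools

import pytest

_loads = itertools.count()


class TestFixtureScopes(object):

    @pytest.fixture(scope="class")
    def raw_resource(self):
        # Evaluated once for the whole class.
        return next(_loads)

    @pytest.fixture()
    def wrapper(self, raw_resource):
        # Re-created for every test method.
        return {'resource': raw_resource}

    def test_first(self, wrapper):
        assert wrapper['resource'] == 0

    def test_second(self, wrapper):
        # The same class-scoped value is reused, only the wrapper is new.
        assert wrapper['resource'] == 0
```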
200
200
201
201
202 # TODO: mikhail: format the following data structure properly
202 # TODO: mikhail: format the following data structure properly
203 DIFF_FIXTURES = [
203 DIFF_FIXTURES = [
204 ('hg',
204 ('hg',
205 'hg_diff_add_single_binary_file.diff',
205 'hg_diff_add_single_binary_file.diff',
206 [('US Warszawa.jpg', 'A',
206 [('US Warszawa.jpg', 'A',
207 {'added': 0,
207 {'added': 0,
208 'deleted': 0,
208 'deleted': 0,
209 'binary': True,
209 'binary': True,
210 'ops': {NEW_FILENODE: 'new file 100755',
210 'ops': {NEW_FILENODE: 'new file 100755',
211 BIN_FILENODE: 'binary diff hidden'}}),
211 BIN_FILENODE: 'binary diff hidden'}}),
212 ]),
212 ]),
213 ('hg',
213 ('hg',
214 'hg_diff_mod_single_binary_file.diff',
214 'hg_diff_mod_single_binary_file.diff',
215 [('US Warszawa.jpg', 'M',
215 [('US Warszawa.jpg', 'M',
216 {'added': 0,
216 {'added': 0,
217 'deleted': 0,
217 'deleted': 0,
218 'binary': True,
218 'binary': True,
219 'ops': {MOD_FILENODE: 'modified file',
219 'ops': {MOD_FILENODE: 'modified file',
220 BIN_FILENODE: 'binary diff hidden'}}),
220 BIN_FILENODE: 'binary diff hidden'}}),
221 ]),
221 ]),
222 ('hg',
222 ('hg',
223 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
223 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
224 [('README', 'M',
224 [('README', 'M',
225 {'added': 3,
225 {'added': 3,
226 'deleted': 0,
226 'deleted': 0,
227 'binary': False,
227 'binary': False,
228 'ops': {MOD_FILENODE: 'modified file',
228 'ops': {MOD_FILENODE: 'modified file',
229 RENAMED_FILENODE: 'file renamed from README.rst to README',
229 RENAMED_FILENODE: 'file renamed from README.rst to README',
230 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
230 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
231 ]),
231 ]),
232 ('hg',
232 ('hg',
233 'hg_diff_no_newline.diff',
233 'hg_diff_no_newline.diff',
234 [('server.properties', 'M',
234 [('server.properties', 'M',
235 {'added': 2,
235 {'added': 2,
236 'deleted': 1,
236 'deleted': 1,
237 'binary': False,
237 'binary': False,
238 'ops': {MOD_FILENODE: 'modified file'}}),
238 'ops': {MOD_FILENODE: 'modified file'}}),
239 ]),
239 ]),
240 ('hg',
240 ('hg',
241 'hg_diff_mod_file_and_rename.diff',
241 'hg_diff_mod_file_and_rename.diff',
242 [('README.rst', 'M',
242 [('README.rst', 'M',
243 {'added': 3,
243 {'added': 3,
244 'deleted': 0,
244 'deleted': 0,
245 'binary': False,
245 'binary': False,
246 'ops': {MOD_FILENODE: 'modified file',
246 'ops': {MOD_FILENODE: 'modified file',
247 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
247 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
248 ]),
248 ]),
249 ('hg',
249 ('hg',
250 'hg_diff_del_single_binary_file.diff',
250 'hg_diff_del_single_binary_file.diff',
251 [('US Warszawa.jpg', 'D',
251 [('US Warszawa.jpg', 'D',
252 {'added': 0,
252 {'added': 0,
253 'deleted': 0,
253 'deleted': 0,
254 'binary': True,
254 'binary': True,
255 'ops': {DEL_FILENODE: 'deleted file',
255 'ops': {DEL_FILENODE: 'deleted file',
256 BIN_FILENODE: 'binary diff hidden'}}),
256 BIN_FILENODE: 'binary diff hidden'}}),
257 ]),
257 ]),
258 ('hg',
258 ('hg',
259 'hg_diff_chmod_and_mod_single_binary_file.diff',
259 'hg_diff_chmod_and_mod_single_binary_file.diff',
260 [('gravatar.png', 'M',
260 [('gravatar.png', 'M',
261 {'added': 0,
261 {'added': 0,
262 'deleted': 0,
262 'deleted': 0,
263 'binary': True,
263 'binary': True,
264 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
264 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
265 BIN_FILENODE: 'binary diff hidden'}}),
265 BIN_FILENODE: 'binary diff hidden'}}),
266 ]),
266 ]),
267 ('hg',
267 ('hg',
268 'hg_diff_chmod.diff',
268 'hg_diff_chmod.diff',
269 [('file', 'M',
269 [('file', 'M',
270 {'added': 0,
270 {'added': 0,
271 'deleted': 0,
271 'deleted': 0,
272 'binary': True,
272 'binary': True,
273 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
273 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
274 ]),
274 ]),
275 ('hg',
275 ('hg',
276 'hg_diff_rename_file.diff',
276 'hg_diff_rename_file.diff',
277 [('file_renamed', 'M',
277 [('file_renamed', 'M',
278 {'added': 0,
278 {'added': 0,
279 'deleted': 0,
279 'deleted': 0,
280 'binary': True,
280 'binary': True,
281 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
281 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
282 ]),
282 ]),
283 ('hg',
283 ('hg',
284 'hg_diff_rename_and_chmod_file.diff',
284 'hg_diff_rename_and_chmod_file.diff',
285 [('README', 'M',
285 [('README', 'M',
286 {'added': 0,
286 {'added': 0,
287 'deleted': 0,
287 'deleted': 0,
288 'binary': True,
288 'binary': True,
289 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
289 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
290 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
290 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
291 ]),
291 ]),
292 ('hg',
292 ('hg',
293 'hg_diff_binary_and_normal.diff',
293 'hg_diff_binary_and_normal.diff',
294 [('img/baseline-10px.png', 'A',
294 [('img/baseline-10px.png', 'A',
295 {'added': 0,
295 {'added': 0,
296 'deleted': 0,
296 'deleted': 0,
297 'binary': True,
297 'binary': True,
298 'ops': {NEW_FILENODE: 'new file 100644',
298 'ops': {NEW_FILENODE: 'new file 100644',
299 BIN_FILENODE: 'binary diff hidden'}}),
299 BIN_FILENODE: 'binary diff hidden'}}),
300 ('js/jquery/hashgrid.js', 'A',
300 ('js/jquery/hashgrid.js', 'A',
301 {'added': 340,
301 {'added': 340,
302 'deleted': 0,
302 'deleted': 0,
303 'binary': False,
303 'binary': False,
304 'ops': {NEW_FILENODE: 'new file 100755'}}),
304 'ops': {NEW_FILENODE: 'new file 100755'}}),
305 ('index.html', 'M',
305 ('index.html', 'M',
306 {'added': 3,
306 {'added': 3,
307 'deleted': 2,
307 'deleted': 2,
308 'binary': False,
308 'binary': False,
309 'ops': {MOD_FILENODE: 'modified file'}}),
309 'ops': {MOD_FILENODE: 'modified file'}}),
310 ('less/docs.less', 'M',
310 ('less/docs.less', 'M',
311 {'added': 34,
311 {'added': 34,
312 'deleted': 0,
312 'deleted': 0,
313 'binary': False,
313 'binary': False,
314 'ops': {MOD_FILENODE: 'modified file'}}),
314 'ops': {MOD_FILENODE: 'modified file'}}),
315 ('less/scaffolding.less', 'M',
315 ('less/scaffolding.less', 'M',
316 {'added': 1,
316 {'added': 1,
317 'deleted': 3,
317 'deleted': 3,
318 'binary': False,
318 'binary': False,
319 'ops': {MOD_FILENODE: 'modified file'}}),
319 'ops': {MOD_FILENODE: 'modified file'}}),
320 ('readme.markdown', 'M',
320 ('readme.markdown', 'M',
321 {'added': 1,
321 {'added': 1,
322 'deleted': 10,
322 'deleted': 10,
323 'binary': False,
323 'binary': False,
324 'ops': {MOD_FILENODE: 'modified file'}}),
324 'ops': {MOD_FILENODE: 'modified file'}}),
325 ('img/baseline-20px.png', 'D',
325 ('img/baseline-20px.png', 'D',
326 {'added': 0,
326 {'added': 0,
327 'deleted': 0,
327 'deleted': 0,
328 'binary': True,
328 'binary': True,
329 'ops': {DEL_FILENODE: 'deleted file',
329 'ops': {DEL_FILENODE: 'deleted file',
330 BIN_FILENODE: 'binary diff hidden'}}),
330 BIN_FILENODE: 'binary diff hidden'}}),
331 ('js/global.js', 'D',
331 ('js/global.js', 'D',
332 {'added': 0,
332 {'added': 0,
333 'deleted': 75,
333 'deleted': 75,
334 'binary': False,
334 'binary': False,
335 'ops': {DEL_FILENODE: 'deleted file'}})
335 'ops': {DEL_FILENODE: 'deleted file'}})
336 ]),
336 ]),
337 ('git',
337 ('git',
338 'git_diff_chmod.diff',
338 'git_diff_chmod.diff',
339 [('work-horus.xls', 'M',
339 [('work-horus.xls', 'M',
340 {'added': 0,
340 {'added': 0,
341 'deleted': 0,
341 'deleted': 0,
342 'binary': True,
342 'binary': True,
343 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
343 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
344 ]),
344 ]),
345 ('git',
345 ('git',
346 'git_diff_js_chars.diff',
346 'git_diff_js_chars.diff',
347 [('\\"><img src=x onerror=prompt(0)>/\\"><img src=x onerror=prompt(1)>.txt', 'M',
347 [('\\"><img src=x onerror=prompt(0)>/\\"><img src=x onerror=prompt(1)>.txt', 'M',
348 {'added': 1,
348 {'added': 1,
349 'deleted': 0,
349 'deleted': 0,
350 'binary': False,
350 'binary': False,
351 'ops': {MOD_FILENODE: 'modified file'}})
351 'ops': {MOD_FILENODE: 'modified file'}})
352 ]),
352 ]),
353 ('git',
353 ('git',
354 'git_diff_rename_file.diff',
354 'git_diff_rename_file.diff',
355 [('file.xls', 'M',
355 [('file.xls', 'M',
356 {'added': 0,
356 {'added': 0,
357 'deleted': 0,
357 'deleted': 0,
358 'binary': True,
358 'binary': True,
359 'ops': {
359 'ops': {
360 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
360 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
361 ]),
361 ]),
362 ('git',
362 ('git',
363 'git_diff_mod_single_binary_file.diff',
363 'git_diff_mod_single_binary_file.diff',
364 [('US Warszawa.jpg', 'M',
364 [('US Warszawa.jpg', 'M',
365 {'added': 0,
365 {'added': 0,
366 'deleted': 0,
366 'deleted': 0,
367 'binary': True,
367 'binary': True,
368 'ops': {MOD_FILENODE: 'modified file',
368 'ops': {MOD_FILENODE: 'modified file',
369 BIN_FILENODE: 'binary diff hidden'}})
369 BIN_FILENODE: 'binary diff hidden'}})
370 ]),
370 ]),
371 ('git',
371 ('git',
372 'git_diff_binary_and_normal.diff',
372 'git_diff_binary_and_normal.diff',
373 [('img/baseline-10px.png', 'A',
373 [('img/baseline-10px.png', 'A',
374 {'added': 0,
374 {'added': 0,
375 'deleted': 0,
375 'deleted': 0,
376 'binary': True,
376 'binary': True,
377 'ops': {NEW_FILENODE: 'new file 100644',
377 'ops': {NEW_FILENODE: 'new file 100644',
378 BIN_FILENODE: 'binary diff hidden'}}),
378 BIN_FILENODE: 'binary diff hidden'}}),
379 ('js/jquery/hashgrid.js', 'A',
379 ('js/jquery/hashgrid.js', 'A',
380 {'added': 340,
380 {'added': 340,
381 'deleted': 0,
381 'deleted': 0,
382 'binary': False,
382 'binary': False,
383 'ops': {NEW_FILENODE: 'new file 100755'}}),
383 'ops': {NEW_FILENODE: 'new file 100755'}}),
384 ('index.html', 'M',
384 ('index.html', 'M',
385 {'added': 3,
385 {'added': 3,
386 'deleted': 2,
386 'deleted': 2,
387 'binary': False,
387 'binary': False,
388 'ops': {MOD_FILENODE: 'modified file'}}),
388 'ops': {MOD_FILENODE: 'modified file'}}),
389 ('less/docs.less', 'M',
389 ('less/docs.less', 'M',
390 {'added': 34,
390 {'added': 34,
391 'deleted': 0,
391 'deleted': 0,
392 'binary': False,
392 'binary': False,
393 'ops': {MOD_FILENODE: 'modified file'}}),
393 'ops': {MOD_FILENODE: 'modified file'}}),
394 ('less/scaffolding.less', 'M',
394 ('less/scaffolding.less', 'M',
395 {'added': 1,
395 {'added': 1,
396 'deleted': 3,
396 'deleted': 3,
397 'binary': False,
397 'binary': False,
398 'ops': {MOD_FILENODE: 'modified file'}}),
398 'ops': {MOD_FILENODE: 'modified file'}}),
399 ('readme.markdown', 'M',
399 ('readme.markdown', 'M',
400 {'added': 1,
400 {'added': 1,
401 'deleted': 10,
401 'deleted': 10,
402 'binary': False,
402 'binary': False,
403 'ops': {MOD_FILENODE: 'modified file'}}),
403 'ops': {MOD_FILENODE: 'modified file'}}),
404 ('img/baseline-20px.png', 'D',
404 ('img/baseline-20px.png', 'D',
405 {'added': 0,
405 {'added': 0,
406 'deleted': 0,
406 'deleted': 0,
407 'binary': True,
407 'binary': True,
408 'ops': {DEL_FILENODE: 'deleted file',
408 'ops': {DEL_FILENODE: 'deleted file',
409 BIN_FILENODE: 'binary diff hidden'}}),
409 BIN_FILENODE: 'binary diff hidden'}}),
410 ('js/global.js', 'D',
410 ('js/global.js', 'D',
411 {'added': 0,
411 {'added': 0,
412 'deleted': 75,
412 'deleted': 75,
413 'binary': False,
413 'binary': False,
414 'ops': {DEL_FILENODE: 'deleted file'}}),
414 'ops': {DEL_FILENODE: 'deleted file'}}),
415 ]),
415 ]),
416 ('hg',
416 ('hg',
417 'diff_with_diff_data.diff',
417 'diff_with_diff_data.diff',
418 [('vcs/backends/base.py', 'M',
418 [('vcs/backends/base.py', 'M',
419 {'added': 18,
419 {'added': 18,
420 'deleted': 2,
420 'deleted': 2,
421 'binary': False,
421 'binary': False,
422 'ops': {MOD_FILENODE: 'modified file'}}),
422 'ops': {MOD_FILENODE: 'modified file'}}),
423 ('vcs/backends/git/repository.py', 'M',
423 ('vcs/backends/git/repository.py', 'M',
424 {'added': 46,
424 {'added': 46,
425 'deleted': 15,
425 'deleted': 15,
426 'binary': False,
426 'binary': False,
427 'ops': {MOD_FILENODE: 'modified file'}}),
427 'ops': {MOD_FILENODE: 'modified file'}}),
428 ('vcs/backends/hg.py', 'M',
428 ('vcs/backends/hg.py', 'M',
429 {'added': 22,
429 {'added': 22,
430 'deleted': 3,
430 'deleted': 3,
431 'binary': False,
431 'binary': False,
432 'ops': {MOD_FILENODE: 'modified file'}}),
432 'ops': {MOD_FILENODE: 'modified file'}}),
433 ('vcs/tests/test_git.py', 'M',
433 ('vcs/tests/test_git.py', 'M',
434 {'added': 5,
434 {'added': 5,
435 'deleted': 5,
435 'deleted': 5,
436 'binary': False,
436 'binary': False,
437 'ops': {MOD_FILENODE: 'modified file'}}),
437 'ops': {MOD_FILENODE: 'modified file'}}),
438 ('vcs/tests/test_repository.py', 'M',
438 ('vcs/tests/test_repository.py', 'M',
439 {'added': 174,
439 {'added': 174,
440 'deleted': 2,
440 'deleted': 2,
441 'binary': False,
441 'binary': False,
442 'ops': {MOD_FILENODE: 'modified file'}}),
442 'ops': {MOD_FILENODE: 'modified file'}}),
443 ]),
443 ]),
444 ('hg',
444 ('hg',
445 'hg_diff_copy_file.diff',
445 'hg_diff_copy_file.diff',
446 [('file2', 'M',
446 [('file2', 'M',
447 {'added': 0,
447 {'added': 0,
448 'deleted': 0,
448 'deleted': 0,
449 'binary': True,
449 'binary': True,
450 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
450 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
451 ]),
451 ]),
452 ('hg',
452 ('hg',
453 'hg_diff_copy_and_modify_file.diff',
453 'hg_diff_copy_and_modify_file.diff',
454 [('file3', 'M',
454 [('file3', 'M',
455 {'added': 1,
455 {'added': 1,
456 'deleted': 0,
456 'deleted': 0,
457 'binary': False,
457 'binary': False,
458 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
458 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
459 MOD_FILENODE: 'modified file'}}),
459 MOD_FILENODE: 'modified file'}}),
460 ]),
460 ]),
461 ('hg',
461 ('hg',
462 'hg_diff_copy_and_chmod_file.diff',
462 'hg_diff_copy_and_chmod_file.diff',
463 [('file4', 'M',
463 [('file4', 'M',
464 {'added': 0,
464 {'added': 0,
465 'deleted': 0,
465 'deleted': 0,
466 'binary': True,
466 'binary': True,
467 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
467 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
468 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
468 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
469 ]),
469 ]),
470 ('hg',
470 ('hg',
471 'hg_diff_copy_chmod_and_edit_file.diff',
471 'hg_diff_copy_chmod_and_edit_file.diff',
472 [('file5', 'M',
472 [('file5', 'M',
473 {'added': 2,
473 {'added': 2,
474 'deleted': 1,
474 'deleted': 1,
475 'binary': False,
475 'binary': False,
476 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
476 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
477 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
477 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
478 MOD_FILENODE: 'modified file'}})]),
478 MOD_FILENODE: 'modified file'}})]),
479
479
480 # Diffs to validate rename and copy of a file with a space in its name
480 # Diffs to validate rename and copy of a file with a space in its name
481 ('git',
481 ('git',
482 'git_diff_rename_file_with_spaces.diff',
482 'git_diff_rename_file_with_spaces.diff',
483 [('file_with_ two spaces.txt', 'M',
483 [('file_with_ two spaces.txt', 'M',
484 {'added': 0,
484 {'added': 0,
485 'deleted': 0,
485 'deleted': 0,
486 'binary': True,
486 'binary': True,
487 'ops': {
487 'ops': {
488 RENAMED_FILENODE: (
488 RENAMED_FILENODE: (
489 'file renamed from file_with_ spaces.txt to file_with_ '
489 'file renamed from file_with_ spaces.txt to file_with_ '
490 ' two spaces.txt')}
490 ' two spaces.txt')}
491 }), ]),
491 }), ]),
492 ('hg',
492 ('hg',
493 'hg_diff_rename_file_with_spaces.diff',
493 'hg_diff_rename_file_with_spaces.diff',
494 [('file_changed _.txt', 'M',
494 [('file_changed _.txt', 'M',
495 {'added': 0,
495 {'added': 0,
496 'deleted': 0,
496 'deleted': 0,
497 'binary': True,
497 'binary': True,
498 'ops': {
498 'ops': {
499 RENAMED_FILENODE: (
499 RENAMED_FILENODE: (
500 'file renamed from file_ with update.txt to file_changed'
500 'file renamed from file_ with update.txt to file_changed'
501 ' _.txt')}
501 ' _.txt')}
502 }), ]),
502 }), ]),
503 ('hg',
503 ('hg',
504 'hg_diff_copy_file_with_spaces.diff',
504 'hg_diff_copy_file_with_spaces.diff',
505 [('file_copied_ with spaces.txt', 'M',
505 [('file_copied_ with spaces.txt', 'M',
506 {'added': 0,
506 {'added': 0,
507 'deleted': 0,
507 'deleted': 0,
508 'binary': True,
508 'binary': True,
509 'ops': {
509 'ops': {
510 COPIED_FILENODE: (
510 COPIED_FILENODE: (
511 'file copied from file_changed_without_spaces.txt to'
511 'file copied from file_changed_without_spaces.txt to'
512 ' file_copied_ with spaces.txt')}
512 ' file_copied_ with spaces.txt')}
513 }),
513 }),
514 ]),
514 ]),
515
515
516 # special characters from git
516 # special characters from git
517 ('git',
517 ('git',
518 'git_diff_binary_special_files.diff',
518 'git_diff_binary_special_files.diff',
519 [('css/_Icon\\r', 'A',
519 [('css/_Icon\\r', 'A',
520 {'added': 0,
520 {'added': 0,
521 'deleted': 0,
521 'deleted': 0,
522 'binary': True,
522 'binary': True,
523 'ops': {NEW_FILENODE: 'new file 100644',
523 'ops': {NEW_FILENODE: 'new file 100644',
524 BIN_FILENODE: 'binary diff hidden'}
524 BIN_FILENODE: 'binary diff hidden'}
525 }),
525 }),
526 ]),
526 ]),
527 ('git',
527 ('git',
528 'git_diff_binary_special_files_2.diff',
528 'git_diff_binary_special_files_2.diff',
529 [('css/Icon\\r', 'A',
529 [('css/Icon\\r', 'A',
530 {'added': 0,
530 {'added': 0,
531 'deleted': 0,
531 'deleted': 0,
532 'binary': True,
532 'binary': True,
533 'ops': {NEW_FILENODE: 'new file 100644', }
533 'ops': {NEW_FILENODE: 'new file 100644', }
534 }),
534 }),
535 ]),
535 ]),
536
536
537 ('svn',
537 ('svn',
538 'svn_diff_binary_add_file.diff',
538 'svn_diff_binary_add_file.diff',
539 [('intl.dll', 'A',
539 [('intl.dll', 'A',
540 {'added': 0,
540 {'added': 0,
541 'deleted': 0,
541 'deleted': 0,
542 'binary': False,
542 'binary': False,
543 'ops': {NEW_FILENODE: 'new file 10644',
543 'ops': {NEW_FILENODE: 'new file 10644',
544 #TODO(Marcink): depends on binary detection on svn patches
544 #TODO(Marcink): depends on binary detection on svn patches
545 # BIN_FILENODE: 'binary diff hidden'
545 # BIN_FILENODE: 'binary diff hidden'
546 }
546 }
547 }),
547 }),
548 ]),
548 ]),
549
549
550 ('svn',
550 ('svn',
551 'svn_diff_multiple_changes.diff',
551 'svn_diff_multiple_changes.diff',
552 [('trunk/doc/images/SettingsOverlay.png', 'M',
552 [('trunk/doc/images/SettingsOverlay.png', 'M',
553 {'added': 0,
553 {'added': 0,
554 'deleted': 0,
554 'deleted': 0,
555 'binary': False,
555 'binary': False,
556 'ops': {MOD_FILENODE: 'modified file',
556 'ops': {MOD_FILENODE: 'modified file',
557 #TODO(Marcink): depends on binary detection on svn patches
557 #TODO(Marcink): depends on binary detection on svn patches
558 # BIN_FILENODE: 'binary diff hidden'
558 # BIN_FILENODE: 'binary diff hidden'
559 }
559 }
560 }),
560 }),
561 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
561 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
562 {'added': 89,
562 {'added': 89,
563 'deleted': 34,
563 'deleted': 34,
564 'binary': False,
564 'binary': False,
565 'ops': {MOD_FILENODE: 'modified file'}
565 'ops': {MOD_FILENODE: 'modified file'}
566 }),
566 }),
567 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
567 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
568 {'added': 66,
568 {'added': 66,
569 'deleted': 21,
569 'deleted': 21,
570 'binary': False,
570 'binary': False,
571 'ops': {MOD_FILENODE: 'modified file'}
571 'ops': {MOD_FILENODE: 'modified file'}
572 }),
572 }),
573 ('trunk/src/Changelog.txt', 'M',
573 ('trunk/src/Changelog.txt', 'M',
574 {'added': 2,
574 {'added': 2,
575 'deleted': 0,
575 'deleted': 0,
576 'binary': False,
576 'binary': False,
577 'ops': {MOD_FILENODE: 'modified file'}
577 'ops': {MOD_FILENODE: 'modified file'}
578 }),
578 }),
579 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
579 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
580 {'added': 19,
580 {'added': 19,
581 'deleted': 13,
581 'deleted': 13,
582 'binary': False,
582 'binary': False,
583 'ops': {MOD_FILENODE: 'modified file'}
583 'ops': {MOD_FILENODE: 'modified file'}
584 }),
584 }),
585 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
585 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
586 {'added': 16,
586 {'added': 16,
587 'deleted': 1,
587 'deleted': 1,
588 'binary': False,
588 'binary': False,
589 'ops': {MOD_FILENODE: 'modified file'}
589 'ops': {MOD_FILENODE: 'modified file'}
590 }),
590 }),
591 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
591 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
592 {'added': 3,
592 {'added': 3,
593 'deleted': 0,
593 'deleted': 0,
594 'binary': False,
594 'binary': False,
595 'ops': {MOD_FILENODE: 'modified file'}
595 'ops': {MOD_FILENODE: 'modified file'}
596 }),
596 }),
597 ('trunk/src/TortoiseProc/resource.h', 'M',
597 ('trunk/src/TortoiseProc/resource.h', 'M',
598 {'added': 2,
598 {'added': 2,
599 'deleted': 0,
599 'deleted': 0,
600 'binary': False,
600 'binary': False,
601 'ops': {MOD_FILENODE: 'modified file'}
601 'ops': {MOD_FILENODE: 'modified file'}
602 }),
602 }),
603 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
603 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
604 {'added': 50,
604 {'added': 50,
605 'deleted': 1,
605 'deleted': 1,
606 'binary': False,
606 'binary': False,
607 'ops': {MOD_FILENODE: 'modified file'}
607 'ops': {MOD_FILENODE: 'modified file'}
608 }),
608 }),
609 ]),
609 ]),
610
610
611 ]
611 ]
612
612
613 DIFF_FIXTURES_WITH_CONTENT = [
613 DIFF_FIXTURES_WITH_CONTENT = [
614 (
614 (
615 'hg', 'hg_diff_single_file_change_newline.diff',
615 'hg', 'hg_diff_single_file_change_newline.diff',
616 [
616 [
617 (
617 (
618 'file_b', # filename
618 'file_b', # filename
619 'A', # change
619 'A', # change
620 { # stats
620 { # stats
621 'added': 1,
621 'added': 1,
622 'deleted': 0,
622 'deleted': 0,
623 'binary': False,
623 'binary': False,
624 'ops': {NEW_FILENODE: 'new file 100644', }
624 'ops': {NEW_FILENODE: 'new file 100644', }
625 },
625 },
626 '@@ -0,0 +1 @@\n+test_content b\n' # diff
626 '@@ -0,0 +1 @@\n+test_content b\n' # diff
627 ),
627 ),
628 ],
628 ],
629 ),
629 ),
630 (
630 (
631 'hg', 'hg_diff_double_file_change_newline.diff',
631 'hg', 'hg_diff_double_file_change_newline.diff',
632 [
632 [
633 (
633 (
634 'file_b', # filename
634 'file_b', # filename
635 'A', # change
635 'A', # change
636 { # stats
636 { # stats
637 'added': 1,
637 'added': 1,
638 'deleted': 0,
638 'deleted': 0,
639 'binary': False,
639 'binary': False,
640 'ops': {NEW_FILENODE: 'new file 100644', }
640 'ops': {NEW_FILENODE: 'new file 100644', }
641 },
641 },
642 '@@ -0,0 +1 @@\n+test_content b\n' # diff
642 '@@ -0,0 +1 @@\n+test_content b\n' # diff
643 ),
643 ),
644 (
644 (
645 'file_c', # filename
645 'file_c', # filename
646 'A', # change
646 'A', # change
647 { # stats
647 { # stats
648 'added': 1,
648 'added': 1,
649 'deleted': 0,
649 'deleted': 0,
650 'binary': False,
650 'binary': False,
651 'ops': {NEW_FILENODE: 'new file 100644', }
651 'ops': {NEW_FILENODE: 'new file 100644', }
652 },
652 },
653 '@@ -0,0 +1 @@\n+test_content c\n' # diff
653 '@@ -0,0 +1 @@\n+test_content c\n' # diff
654 ),
654 ),
655 ],
655 ],
656 ),
656 ),
657 (
657 (
658 'hg', 'hg_diff_double_file_change_double_newline.diff',
658 'hg', 'hg_diff_double_file_change_double_newline.diff',
659 [
659 [
660 (
660 (
661 'file_b', # filename
661 'file_b', # filename
662 'A', # change
662 'A', # change
663 { # stats
663 { # stats
664 'added': 1,
664 'added': 1,
665 'deleted': 0,
665 'deleted': 0,
666 'binary': False,
666 'binary': False,
667 'ops': {NEW_FILENODE: 'new file 100644', }
667 'ops': {NEW_FILENODE: 'new file 100644', }
668 },
668 },
669 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
669 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
670 ),
670 ),
671 (
671 (
672 'file_c', # filename
672 'file_c', # filename
673 'A', # change
673 'A', # change
674 { # stats
674 { # stats
675 'added': 1,
675 'added': 1,
676 'deleted': 0,
676 'deleted': 0,
677 'binary': False,
677 'binary': False,
678 'ops': {NEW_FILENODE: 'new file 100644', }
678 'ops': {NEW_FILENODE: 'new file 100644', }
679 },
679 },
680 '@@ -0,0 +1 @@\n+test_content c\n' # diff
680 '@@ -0,0 +1 @@\n+test_content c\n' # diff
681 ),
681 ),
682 ],
682 ],
683 ),
683 ),
684 (
684 (
685 'hg', 'hg_diff_four_file_change_newline.diff',
685 'hg', 'hg_diff_four_file_change_newline.diff',
686 [
686 [
687 (
687 (
688 'file', # filename
688 'file', # filename
689 'A', # change
689 'A', # change
690 { # stats
690 { # stats
691 'added': 1,
691 'added': 1,
692 'deleted': 0,
692 'deleted': 0,
693 'binary': False,
693 'binary': False,
694 'ops': {NEW_FILENODE: 'new file 100644', }
694 'ops': {NEW_FILENODE: 'new file 100644', }
695 },
695 },
696 '@@ -0,0 +1,1 @@\n+file\n' # diff
696 '@@ -0,0 +1,1 @@\n+file\n' # diff
697 ),
697 ),
698 (
698 (
699 'file2', # filename
699 'file2', # filename
700 'A', # change
700 'A', # change
701 { # stats
701 { # stats
702 'added': 1,
702 'added': 1,
703 'deleted': 0,
703 'deleted': 0,
704 'binary': False,
704 'binary': False,
705 'ops': {NEW_FILENODE: 'new file 100644', }
705 'ops': {NEW_FILENODE: 'new file 100644', }
706 },
706 },
707 '@@ -0,0 +1,1 @@\n+another line\n' # diff
707 '@@ -0,0 +1,1 @@\n+another line\n' # diff
708 ),
708 ),
709 (
709 (
710 'file3', # filename
710 'file3', # filename
711 'A', # change
711 'A', # change
712 { # stats
712 { # stats
713 'added': 1,
713 'added': 1,
714 'deleted': 0,
714 'deleted': 0,
715 'binary': False,
715 'binary': False,
716 'ops': {NEW_FILENODE: 'new file 100644', }
716 'ops': {NEW_FILENODE: 'new file 100644', }
717 },
717 },
718 '@@ -0,0 +1,1 @@\n+newline\n' # diff
718 '@@ -0,0 +1,1 @@\n+newline\n' # diff
719 ),
719 ),
720 (
720 (
721 'file4', # filename
721 'file4', # filename
722 'A', # change
722 'A', # change
723 { # stats
723 { # stats
724 'added': 1,
724 'added': 1,
725 'deleted': 0,
725 'deleted': 0,
726 'binary': False,
726 'binary': False,
727 'ops': {NEW_FILENODE: 'new file 100644', }
727 'ops': {NEW_FILENODE: 'new file 100644', }
728 },
728 },
729 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
729 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
730 ),
730 ),
731 ],
731 ],
732 ),
732 ),
733
733
734 ]
734 ]
735
735
736
736
737 diff_class = {
737 diff_class = {
738 'git': GitDiff,
738 'git': GitDiff,
739 'hg': MercurialDiff,
739 'hg': MercurialDiff,
740 'svn': SubversionDiff,
740 'svn': SubversionDiff,
741 }
741 }
742
742
743
743
744 @pytest.fixture(params=DIFF_FIXTURES)
744 @pytest.fixture(params=DIFF_FIXTURES)
745 def diff_fixture(request):
745 def diff_fixture(request):
746 vcs, diff_fixture, expected = request.param
746 vcs, diff_fixture, expected = request.param
747 diff_txt = fixture.load_resource(diff_fixture)
747 diff_txt = fixture.load_resource(diff_fixture)
748 diff = diff_class[vcs](diff_txt)
748 diff = diff_class[vcs](diff_txt)
749 return diff, expected
749 return diff, expected
750
750
751
751
752 def test_diff_lib(diff_fixture):
752 def test_diff_lib(diff_fixture):
753 diff, expected_data = diff_fixture
753 diff, expected_data = diff_fixture
754 diff_proc = DiffProcessor(diff)
754 diff_proc = DiffProcessor(diff)
755 diff_proc_d = diff_proc.prepare()
755 diff_proc_d = diff_proc.prepare()
756 data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
756 data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
757 assert expected_data == data
757 assert expected_data == data
758
758
759
759
760 @pytest.fixture(params=DIFF_FIXTURES_WITH_CONTENT)
760 @pytest.fixture(params=DIFF_FIXTURES_WITH_CONTENT)
761 def diff_fixture_w_content(request):
761 def diff_fixture_w_content(request):
762 vcs, diff_fixture, expected = request.param
762 vcs, diff_fixture, expected = request.param
763 diff_txt = fixture.load_resource(diff_fixture)
763 diff_txt = fixture.load_resource(diff_fixture)
764 diff = diff_class[vcs](diff_txt)
764 diff = diff_class[vcs](diff_txt)
765 return diff, expected
765 return diff, expected
766
766
767
767
768 def test_diff_over_limit(request):
768 def test_diff_over_limit(request):
769
769
770 diff_limit = 1024
770 diff_limit = 1024
771 file_limit = 1024
771 file_limit = 1024
772
772
773 raw_diff = fixture.load_resource('large_diff.diff')
773 raw_diff = fixture.load_resource('large_diff.diff')
774 vcs_diff = GitDiff(raw_diff)
774 vcs_diff = GitDiff(raw_diff)
775 diff_processor = DiffProcessor(
775 diff_processor = DiffProcessor(
776 vcs_diff, format='newdiff', diff_limit=diff_limit, file_limit=file_limit,
776 vcs_diff, format='newdiff', diff_limit=diff_limit, file_limit=file_limit,
777 show_full_diff=False)
777 show_full_diff=False)
778
778
779 _parsed = diff_processor.prepare()
779 _parsed = diff_processor.prepare()
780
780
781 commit1 = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
781 commit1 = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
782 commit2 = GitCommit(repository=mock.Mock(), raw_id='abcdef34', idx=2)
782 commit2 = GitCommit(repository=mock.Mock(), raw_id='abcdef34', idx=2)
783
783
784 diffset = DiffSet(
784 diffset = DiffSet(
785 repo_name='repo_name',
785 repo_name='repo_name',
786 source_node_getter=lambda *a, **kw: AttributeDict({'commit': commit1}),
786 source_node_getter=lambda *a, **kw: AttributeDict({'commit': commit1}),
787 target_node_getter=lambda *a, **kw: AttributeDict({'commit': commit2})
787 target_node_getter=lambda *a, **kw: AttributeDict({'commit': commit2})
788 )
788 )
789
789
790 diffset = diffset.render_patchset(_parsed, commit1, commit2)
790 diffset = diffset.render_patchset(_parsed, commit1, commit2)
791
791
792 assert len(diffset.files) == 2
792 assert len(diffset.files) == 2
793 assert diffset.limited_diff is True
793 assert diffset.limited_diff is True
794 assert diffset.files[0].patch['filename'] == 'example.go'
794 assert diffset.files[0].patch['filename'] == 'example.go'
795 assert diffset.files[0].limited_diff is True
795 assert diffset.files[0].limited_diff is True
796
796
797 assert diffset.files[1].patch['filename'] == 'README.md'
797 assert diffset.files[1].patch['filename'] == 'README.md'
798 assert diffset.files[1].limited_diff is False
798 assert diffset.files[1].limited_diff is False
799
799
800
800
801 def test_diff_lib_newlines(diff_fixture_w_content):
801 def test_diff_lib_newlines(diff_fixture_w_content):
802 diff, expected_data = diff_fixture_w_content
802 diff, expected_data = diff_fixture_w_content
803 diff_proc = DiffProcessor(diff)
803 diff_proc = DiffProcessor(diff)
804 diff_proc_d = diff_proc.prepare()
804 diff_proc_d = diff_proc.prepare()
805 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
805 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
806 for x in diff_proc_d]
806 for x in diff_proc_d]
807 assert expected_data == data
807 assert expected_data == data
808
808
809
809
810 @pytest.mark.parametrize('input_str', [
810 @pytest.mark.parametrize('input_str', [
811 '',
811 '',
812 '\n',
812 '\n',
813 '\n\n',
813 '\n\n',
814 'First\n+second',
814 'First\n+second',
815 'First\n+second\n',
815 'First\n+second\n',
816
816
817 '\n\n\n Multi \n\n\n',
817 '\n\n\n Multi \n\n\n',
818 '\n\n\n Multi beginning',
818 '\n\n\n Multi beginning',
819 'Multi end \n\n\n',
819 'Multi end \n\n\n',
820 'Multi end',
820 'Multi end',
821 '@@ -0,0 +1 @@\n+test_content \n\n b\n'
821 '@@ -0,0 +1 @@\n+test_content \n\n b\n'
822 ], ids=no_newline_id_generator)
822 ], ids=no_newline_id_generator)
823 def test_splitlines(input_str):
823 def test_splitlines(input_str):
824 result = DiffProcessor.diff_splitter(input_str)
824 result = DiffProcessor.diff_splitter(input_str)
825 assert list(result) == input_str.splitlines(True)
825 assert list(result) == input_str.splitlines(True)
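`diff_fixture` and `diff_fixture_w_content` above turn the large `DIFF_FIXTURES` tables into parametrized cases: each tuple names a VCS, a .diff resource and the expected per-file stats. A reduced sketch of that data-driven pattern with toy data follows; `count_changes` is a stand-in parser, not RhodeCode's `DiffProcessor`.

```python
# Reduced sketch of the DIFF_FIXTURES pattern with toy data; count_changes
# is a stand-in parser, not RhodeCode's DiffProcessor.
import pytest

FIXTURES = [
    ('+one line\n', {'added': 1, 'deleted': 0}),
    ('-gone\n+new\n', {'added': 1, 'deleted': 1}),
]


def count_changes(raw_diff):
    lines = raw_diff.splitlines()
    return {
        'added': sum(1 for line in lines if line.startswith('+')),
        'deleted': sum(1 for line in lines if line.startswith('-')),
    }


@pytest.fixture(params=FIXTURES)
def diff_case(request):
    raw_diff, expected = request.param
    return raw_diff, expected


def test_change_counts(diff_case):
    raw_diff, expected = diff_case
    assert count_changes(raw_diff) == expected
```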
@@ -1,383 +1,383 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests of :mod:`rhodecode.lib.diffs` around the context of a specific line.
22 Tests of :mod:`rhodecode.lib.diffs` around the context of a specific line.
23 """
23 """
24
24
25 import textwrap
25 import textwrap
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib import diffs
29 from rhodecode.lib import diffs
30 from rhodecode.lib.vcs.backends.git.diff import GitDiff
30 from rhodecode.lib.vcs.backends.git.diff import GitDiff
31
31
32
32
33 def test_context_of_new_and_old_line_number_raises(diff_processor):
33 def test_context_of_new_and_old_line_number_raises(diff_processor):
34 with pytest.raises(ValueError):
34 with pytest.raises(ValueError):
35 diff_processor.get_context_of_line(
35 diff_processor.get_context_of_line(
36 path='file.txt', diff_line=diffs.DiffLineNumber(old=7, new=7))
36 path='file.txt', diff_line=diffs.DiffLineNumber(old=7, new=7))
37
37
38
38
39 def test_context_of_an_old_line_number(diff_processor):
39 def test_context_of_an_old_line_number(diff_processor):
40 context = diff_processor.get_context_of_line(
40 context = diff_processor.get_context_of_line(
41 path='file.txt', diff_line=diffs.DiffLineNumber(old=7, new=None))
41 path='file.txt', diff_line=diffs.DiffLineNumber(old=7, new=None))
42 expected_context = [
42 expected_context = [
43 ('unmod', 'line04\n'),
43 ('unmod', 'line04\n'),
44 ('unmod', 'line05\n'),
44 ('unmod', 'line05\n'),
45 ('unmod', 'line06\n'),
45 ('unmod', 'line06\n'),
46 ('unmod', 'line07\n'),
46 ('unmod', 'line07\n'),
47 ('add', 'line07a Add after line07\n'),
47 ('add', 'line07a Add after line07\n'),
48 ('unmod', 'line08\n'),
48 ('unmod', 'line08\n'),
49 ('unmod', 'line09\n'),
49 ('unmod', 'line09\n'),
50 ]
50 ]
51 assert context == expected_context
51 assert context == expected_context
52
52
53
53
54 def test_context_of_a_new_line_number(diff_processor):
54 def test_context_of_a_new_line_number(diff_processor):
55 context = diff_processor.get_context_of_line(
55 context = diff_processor.get_context_of_line(
56 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=8))
56 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=8))
57 expected_context = [
57 expected_context = [
58 ('unmod', 'line05\n'),
58 ('unmod', 'line05\n'),
59 ('unmod', 'line06\n'),
59 ('unmod', 'line06\n'),
60 ('unmod', 'line07\n'),
60 ('unmod', 'line07\n'),
61 ('add', 'line07a Add after line07\n'),
61 ('add', 'line07a Add after line07\n'),
62 ('unmod', 'line08\n'),
62 ('unmod', 'line08\n'),
63 ('unmod', 'line09\n'),
63 ('unmod', 'line09\n'),
64 ('unmod', 'line10\n'),
64 ('unmod', 'line10\n'),
65 ]
65 ]
66 assert context == expected_context
66 assert context == expected_context
67
67
68
68
69 def test_context_of_an_invisible_line_beginning_of_hunk(diff_processor):
69 def test_context_of_an_invisible_line_beginning_of_hunk(diff_processor):
70 # Note: The caller has to pass in a diff which is suitable to satisfy
70 # Note: The caller has to pass in a diff which is suitable to satisfy
71 # its requirements. This test just ensures that we see a sane behavior.
71 # its requirements. This test just ensures that we see a sane behavior.
72 context = diff_processor.get_context_of_line(
72 context = diff_processor.get_context_of_line(
73 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=3))
73 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=3))
74 expected_context = [
74 expected_context = [
75 ('unmod', 'line02\n'),
75 ('unmod', 'line02\n'),
76 ('unmod', 'line03\n'),
76 ('unmod', 'line03\n'),
77 ('unmod', 'line04\n'),
77 ('unmod', 'line04\n'),
78 ('unmod', 'line05\n'),
78 ('unmod', 'line05\n'),
79 ('unmod', 'line06\n'),
79 ('unmod', 'line06\n'),
80 ]
80 ]
81 assert context == expected_context
81 assert context == expected_context
82
82
83
83
84 def test_context_of_an_invisible_line_end_of_hunk(diff_processor):
84 def test_context_of_an_invisible_line_end_of_hunk(diff_processor):
85 # Note: The caller has to pass in a diff which is suitable to satisfy
85 # Note: The caller has to pass in a diff which is suitable to satisfy
86 # its requirements. This test just ensures that we see a sane behavior.
86 # its requirements. This test just ensures that we see a sane behavior.
87 context = diff_processor.get_context_of_line(
87 context = diff_processor.get_context_of_line(
88 path='file.txt', diff_line=diffs.DiffLineNumber(old=12, new=None))
88 path='file.txt', diff_line=diffs.DiffLineNumber(old=12, new=None))
89 expected_context = [
89 expected_context = [
90 ('unmod', 'line09\n'),
90 ('unmod', 'line09\n'),
91 ('unmod', 'line10\n'),
91 ('unmod', 'line10\n'),
92 ('unmod', 'line11\n'),
92 ('unmod', 'line11\n'),
93 ('unmod', 'line12\n'),
93 ('unmod', 'line12\n'),
94 ('unmod', 'line13\n'),
94 ('unmod', 'line13\n'),
95 ]
95 ]
96 assert context == expected_context
96 assert context == expected_context
97
97
98
98
99 @pytest.mark.parametrize('diff_fixture', ['change-in-beginning.diff'])
99 @pytest.mark.parametrize('diff_fixture', ['change-in-beginning.diff'])
100 def test_context_of_an_incomplete_hunk_in_the_beginning(diff_processor):
100 def test_context_of_an_incomplete_hunk_in_the_beginning(diff_processor):
101 context = diff_processor.get_context_of_line(
101 context = diff_processor.get_context_of_line(
102 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=2))
102 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=2))
103 expected_context = [
103 expected_context = [
104 ('unmod', 'line01\n'),
104 ('unmod', 'line01\n'),
105 ('add', 'line01a Add line after line01\n'),
105 ('add', 'line01a Add line after line01\n'),
106 ('unmod', 'line02\n'),
106 ('unmod', 'line02\n'),
107 ('unmod', 'line03\n'),
107 ('unmod', 'line03\n'),
108 ('unmod', 'line04\n'),
108 ('unmod', 'line04\n'),
109 ]
109 ]
110 assert context == expected_context
110 assert context == expected_context
111
111
112
112
113 @pytest.mark.parametrize('diff_fixture', ['change-in-end.diff'])
113 @pytest.mark.parametrize('diff_fixture', ['change-in-end.diff'])
114 def test_context_of_an_incomplete_hunk_in_the_end(diff_processor):
114 def test_context_of_an_incomplete_hunk_in_the_end(diff_processor):
115 context = diff_processor.get_context_of_line(
115 context = diff_processor.get_context_of_line(
116 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=80))
116 path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=80))
117 expected_context = [
117 expected_context = [
118 ('unmod', 'line36\n'),
118 ('unmod', 'line36\n'),
119 ('unmod', 'line37\n'),
119 ('unmod', 'line37\n'),
120 ('unmod', 'line38\n'),
120 ('unmod', 'line38\n'),
121 ('add', 'line38a Add line after line38\n'),
121 ('add', 'line38a Add line after line38\n'),
122 ('unmod', 'line39\n'),
122 ('unmod', 'line39\n'),
123 ]
123 ]
124 assert context == expected_context
124 assert context == expected_context
125
125
126
126
127 @pytest.mark.parametrize('diff_fixture', [
127 @pytest.mark.parametrize('diff_fixture', [
128 'single-line.diff',
128 'single-line.diff',
129 'single-line-two-files.diff',
129 'single-line-two-files.diff',
130 ])
130 ])
131 def test_appends_newline_for_each_context_line(diff_processor):
131 def test_appends_newline_for_each_context_line(diff_processor):
132 context = diff_processor.get_context_of_line(
132 context = diff_processor.get_context_of_line(
133 path='file_b', diff_line=diffs.DiffLineNumber(old=None, new=1))
133 path='file_b', diff_line=diffs.DiffLineNumber(old=None, new=1))
134 assert context == [('add', 'test_content\n')]
134 assert context == [('add', 'test_content\n')]
135
135
136
136
137 def test_context_of_a_missing_line_raises(diff_processor):
137 def test_context_of_a_missing_line_raises(diff_processor):
138 missing_line = 20
138 missing_line = 20
139 with pytest.raises(diffs.LineNotInDiffException):
139 with pytest.raises(diffs.LineNotInDiffException):
140 diff_processor.get_context_of_line(
140 diff_processor.get_context_of_line(
141 path='file.txt',
141 path='file.txt',
142 diff_line=diffs.DiffLineNumber(old=None, new=missing_line))
142 diff_line=diffs.DiffLineNumber(old=None, new=missing_line))
143
143
144
144
145 def test_context_of_a_missing_file_raises(diff_processor):
145 def test_context_of_a_missing_file_raises(diff_processor):
146 with pytest.raises(diffs.FileNotInDiffException):
146 with pytest.raises(diffs.FileNotInDiffException):
147 diff_processor.get_context_of_line(
147 diff_processor.get_context_of_line(
148 path='not_existing_file.txt',
148 path='not_existing_file.txt',
149 diff_line=diffs.DiffLineNumber(old=None, new=8))
149 diff_line=diffs.DiffLineNumber(old=None, new=8))
150
150
151
151
152 def test_find_context_with_full_context(diff_processor):
152 def test_find_context_with_full_context(diff_processor):
153 context_of_line_7 = [
153 context_of_line_7 = [
154 ('unmod', 'line05\n'),
154 ('unmod', 'line05\n'),
155 ('unmod', 'line06\n'),
155 ('unmod', 'line06\n'),
156 ('unmod', 'line07\n'),
156 ('unmod', 'line07\n'),
157 ('add', 'line07a Add after line07\n'),
157 ('add', 'line07a Add after line07\n'),
158 ('unmod', 'line08\n'),
158 ('unmod', 'line08\n'),
159 ('unmod', 'line09\n'),
159 ('unmod', 'line09\n'),
160 ('unmod', 'line10\n'),
160 ('unmod', 'line10\n'),
161 ]
161 ]
162 found_line = diff_processor.find_context(
162 found_line = diff_processor.find_context(
163 'file.txt', context_of_line_7, offset=3)
163 'file.txt', context_of_line_7, offset=3)
164 assert found_line == [diffs.DiffLineNumber(old=None, new=8)]
164 assert found_line == [diffs.DiffLineNumber(old=None, new=8)]
165
165
166
166
167 @pytest.mark.parametrize('diff_fixture', ['change-duplicated.diff'])
167 @pytest.mark.parametrize('diff_fixture', ['change-duplicated.diff'])
168 def test_find_context_multiple_times(diff_processor):
168 def test_find_context_multiple_times(diff_processor):
169 context = [
169 context = [
170 ('unmod', 'line04\n'),
170 ('unmod', 'line04\n'),
171 ('unmod', 'line05\n'),
171 ('unmod', 'line05\n'),
172 ('unmod', 'line06\n'),
172 ('unmod', 'line06\n'),
173 ('add', 'line06a add line\n'),
173 ('add', 'line06a add line\n'),
174 ('unmod', 'line07\n'),
174 ('unmod', 'line07\n'),
175 ('unmod', 'line08\n'),
175 ('unmod', 'line08\n'),
176 ('unmod', 'line09\n'),
176 ('unmod', 'line09\n'),
177 ]
177 ]
178 found_line = diff_processor.find_context('file.txt', context, offset=3)
178 found_line = diff_processor.find_context('file.txt', context, offset=3)
179 assert found_line == [
179 assert found_line == [
180 diffs.DiffLineNumber(old=None, new=7),
180 diffs.DiffLineNumber(old=None, new=7),
181 diffs.DiffLineNumber(old=None, new=49),
181 diffs.DiffLineNumber(old=None, new=49),
182 ]
182 ]
183
183
184
184
185 @pytest.mark.parametrize('offset', [20, -20, -1, 7])
185 @pytest.mark.parametrize('offset', [20, -20, -1, 7])
186 def test_find_context_offset_param_raises(diff_processor, offset):
186 def test_find_context_offset_param_raises(diff_processor, offset):
187 context_of_line_7 = [
187 context_of_line_7 = [
188 ('unmod', 'line04\n'),
188 ('unmod', 'line04\n'),
189 ('unmod', 'line05\n'),
189 ('unmod', 'line05\n'),
190 ('unmod', 'line06\n'),
190 ('unmod', 'line06\n'),
191 ('unmod', 'line07\n'),
191 ('unmod', 'line07\n'),
192 ('add', 'line07a Add after line07\n'),
192 ('add', 'line07a Add after line07\n'),
193 ('unmod', 'line08\n'),
193 ('unmod', 'line08\n'),
194 ('unmod', 'line09\n'),
194 ('unmod', 'line09\n'),
195 ]
195 ]
196 with pytest.raises(ValueError):
196 with pytest.raises(ValueError):
197 diff_processor.find_context(
197 diff_processor.find_context(
198 'file.txt', context_of_line_7, offset=offset)
198 'file.txt', context_of_line_7, offset=offset)
199
199
200
200
201 def test_find_context_beginning_of_chunk(diff_processor):
201 def test_find_context_beginning_of_chunk(diff_processor):
202 context_of_first_line = [
202 context_of_first_line = [
203 ('unmod', 'line02\n'),
203 ('unmod', 'line02\n'),
204 ('unmod', 'line03\n'),
204 ('unmod', 'line03\n'),
205 ('unmod', 'line04\n'),
205 ('unmod', 'line04\n'),
206 ('unmod', 'line05\n'),
206 ('unmod', 'line05\n'),
207 ]
207 ]
208 found_line = diff_processor.find_context(
208 found_line = diff_processor.find_context(
209 'file.txt', context_of_first_line, offset=0)
209 'file.txt', context_of_first_line, offset=0)
210 assert found_line == [diffs.DiffLineNumber(old=2, new=2)]
210 assert found_line == [diffs.DiffLineNumber(old=2, new=2)]
211
211
212
212
213 @pytest.mark.parametrize('diff_fixture', ['change-in-beginning.diff'])
213 @pytest.mark.parametrize('diff_fixture', ['change-in-beginning.diff'])
214 def test_find_context_beginning_of_file(diff_processor):
214 def test_find_context_beginning_of_file(diff_processor):
215 context_of_first_line = [
215 context_of_first_line = [
216 ('add', 'line01a Add line after line01\n'),
216 ('add', 'line01a Add line after line01\n'),
217 ('unmod', 'line02\n'),
217 ('unmod', 'line02\n'),
218 ('unmod', 'line03\n'),
218 ('unmod', 'line03\n'),
219 ('unmod', 'line04\n'),
219 ('unmod', 'line04\n'),
220 ('unmod', 'line05\n'),
220 ('unmod', 'line05\n'),
221 ('unmod', 'line06\n'),
221 ('unmod', 'line06\n'),
222 ('unmod', 'line07\n'),
222 ('unmod', 'line07\n'),
223 ]
223 ]
224 found_line = diff_processor.find_context(
224 found_line = diff_processor.find_context(
225 'file.txt', context_of_first_line, offset=3)
225 'file.txt', context_of_first_line, offset=3)
226 assert found_line == [diffs.DiffLineNumber(old=4, new=5)]
226 assert found_line == [diffs.DiffLineNumber(old=4, new=5)]
227
227
228
228
229 def test_find_context_end_of_chunk(diff_processor):
229 def test_find_context_end_of_chunk(diff_processor):
230 context_of_last_line = [
230 context_of_last_line = [
231 ('unmod', 'line10\n'),
231 ('unmod', 'line10\n'),
232 ('unmod', 'line11\n'),
232 ('unmod', 'line11\n'),
233 ('unmod', 'line12\n'),
233 ('unmod', 'line12\n'),
234 ('unmod', 'line13\n'),
234 ('unmod', 'line13\n'),
235 ]
235 ]
236 found_line = diff_processor.find_context(
236 found_line = diff_processor.find_context(
237 'file.txt', context_of_last_line, offset=3)
237 'file.txt', context_of_last_line, offset=3)
238 assert found_line == [diffs.DiffLineNumber(old=13, new=14)]
238 assert found_line == [diffs.DiffLineNumber(old=13, new=14)]
239
239
240
240
241 @pytest.fixture
241 @pytest.fixture()
242 def diff_processor(request, diff_fixture):
242 def diff_processor(request, diff_fixture):
243 raw_diff = diffs_store[diff_fixture]
243 raw_diff = diffs_store[diff_fixture]
244 diff = GitDiff(raw_diff)
244 diff = GitDiff(raw_diff)
245 processor = diffs.DiffProcessor(diff)
245 processor = diffs.DiffProcessor(diff)
246 processor.prepare()
246 processor.prepare()
247 return processor
247 return processor
248
248
249
249
250 @pytest.fixture
250 @pytest.fixture()
251 def diff_fixture():
251 def diff_fixture():
252 return 'default.diff'
252 return 'default.diff'
253
253
254
254
255 diff_default = textwrap.dedent("""
255 diff_default = textwrap.dedent("""
256 diff --git a/file.txt b/file.txt
256 diff --git a/file.txt b/file.txt
257 index 76e4f2e..6f8738f 100644
257 index 76e4f2e..6f8738f 100644
258 --- a/file.txt
258 --- a/file.txt
259 +++ b/file.txt
259 +++ b/file.txt
260 @@ -2,12 +2,13 @@ line01
260 @@ -2,12 +2,13 @@ line01
261 line02
261 line02
262 line03
262 line03
263 line04
263 line04
264 line05
264 line05
265 line06
265 line06
266 line07
266 line07
267 +line07a Add after line07
267 +line07a Add after line07
268 line08
268 line08
269 line09
269 line09
270 line10
270 line10
271 line11
271 line11
272 line12
272 line12
273 line13
273 line13
274 """)
274 """)
275
275
276
276
277 diff_beginning = textwrap.dedent("""
277 diff_beginning = textwrap.dedent("""
278 diff --git a/file.txt b/file.txt
278 diff --git a/file.txt b/file.txt
279 index 76e4f2e..47d39f4 100644
279 index 76e4f2e..47d39f4 100644
280 --- a/file.txt
280 --- a/file.txt
281 +++ b/file.txt
281 +++ b/file.txt
282 @@ -1,7 +1,8 @@
282 @@ -1,7 +1,8 @@
283 line01
283 line01
284 +line01a Add line after line01
284 +line01a Add line after line01
285 line02
285 line02
286 line03
286 line03
287 line04
287 line04
288 line05
288 line05
289 line06
289 line06
290 line07
290 line07
291 """)
291 """)
292
292
293
293
294 diff_end = textwrap.dedent("""
294 diff_end = textwrap.dedent("""
295 diff --git a/file.txt b/file.txt
295 diff --git a/file.txt b/file.txt
296 index 76e4f2e..b1304db 100644
296 index 76e4f2e..b1304db 100644
297 --- a/file.txt
297 --- a/file.txt
298 +++ b/file.txt
298 +++ b/file.txt
299 @@ -74,7 +74,8 @@ line32
299 @@ -74,7 +74,8 @@ line32
300 line33
300 line33
301 line34
301 line34
302 line35
302 line35
303 line36
303 line36
304 line37
304 line37
305 line38
305 line38
306 +line38a Add line after line38
306 +line38a Add line after line38
307 line39
307 line39
308 """)
308 """)
309
309
310
310
311 diff_duplicated_change = textwrap.dedent("""
311 diff_duplicated_change = textwrap.dedent("""
312 diff --git a/file.txt b/file.txt
312 diff --git a/file.txt b/file.txt
313 index 76e4f2e..55c2781 100644
313 index 76e4f2e..55c2781 100644
314 --- a/file.txt
314 --- a/file.txt
315 +++ b/file.txt
315 +++ b/file.txt
316 @@ -1,12 +1,13 @@
316 @@ -1,12 +1,13 @@
317 line01
317 line01
318 line02
318 line02
319 line03
319 line03
320 line04
320 line04
321 line05
321 line05
322 line06
322 line06
323 +line06a add line
323 +line06a add line
324 line07
324 line07
325 line08
325 line08
326 line09
326 line09
327 line10
327 line10
328 line11
328 line11
329 line12
329 line12
330 @@ -42,12 +43,13 @@ line39
330 @@ -42,12 +43,13 @@ line39
331 line01
331 line01
332 line02
332 line02
333 line03
333 line03
334 line04
334 line04
335 line05
335 line05
336 line06
336 line06
337 +line06a add line
337 +line06a add line
338 line07
338 line07
339 line08
339 line08
340 line09
340 line09
341 line10
341 line10
342 line11
342 line11
343 line12
343 line12
344 """)
344 """)
345
345
346
346
347 diff_single_line = textwrap.dedent("""
347 diff_single_line = textwrap.dedent("""
348 diff --git a/file_b b/file_b
348 diff --git a/file_b b/file_b
349 new file mode 100644
349 new file mode 100644
350 index 00000000..915e94ff
350 index 00000000..915e94ff
351 --- /dev/null
351 --- /dev/null
352 +++ b/file_b
352 +++ b/file_b
353 @@ -0,0 +1 @@
353 @@ -0,0 +1 @@
354 +test_content
354 +test_content
355 """)
355 """)
356
356
357
357
358 diff_single_line_two_files = textwrap.dedent("""
358 diff_single_line_two_files = textwrap.dedent("""
359 diff --git a/file_b b/file_b
359 diff --git a/file_b b/file_b
360 new file mode 100644
360 new file mode 100644
361 index 00000000..915e94ff
361 index 00000000..915e94ff
362 --- /dev/null
362 --- /dev/null
363 +++ b/file_b
363 +++ b/file_b
364 @@ -0,0 +1 @@
364 @@ -0,0 +1 @@
365 +test_content
365 +test_content
366 diff --git a/file_c b/file_c
366 diff --git a/file_c b/file_c
367 new file mode 100644
367 new file mode 100644
368 index 00000000..915e94ff
368 index 00000000..915e94ff
369 --- /dev/null
369 --- /dev/null
370 +++ b/file_c
370 +++ b/file_c
371 @@ -0,0 +1 @@
371 @@ -0,0 +1 @@
372 +test_content
372 +test_content
373 """)
373 """)
374
374
375
375
376 diffs_store = {
376 diffs_store = {
377 'default.diff': diff_default,
377 'default.diff': diff_default,
378 'change-in-beginning.diff': diff_beginning,
378 'change-in-beginning.diff': diff_beginning,
379 'change-in-end.diff': diff_end,
379 'change-in-end.diff': diff_end,
380 'change-duplicated.diff': diff_duplicated_change,
380 'change-duplicated.diff': diff_duplicated_change,
381 'single-line.diff': diff_single_line,
381 'single-line.diff': diff_single_line,
382 'single-line-two-files.diff': diff_single_line_two_files,
382 'single-line-two-files.diff': diff_single_line_two_files,
383 }
383 }
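
Note: the hunks above lean on a small pytest indirection that is easy to miss when reading the raw diff: `diff_processor` builds its processor from whatever `diff_fixture` returns, and individual tests swap the diff simply by parametrizing the `diff_fixture` name. The following is a minimal, self-contained sketch of that override pattern; the names (`diff_name`, `raw_diff`, `DIFFS`) are illustrative only and are not taken from the repository.

    import pytest

    # Hypothetical stand-in for the module-level store of raw diff texts.
    DIFFS = {
        'default.diff': 'raw default diff text',
        'change-in-end.diff': 'raw diff text with a change at the end',
    }

    @pytest.fixture()
    def diff_name():
        # Default used by tests that do not parametrize the fixture name.
        return 'default.diff'

    @pytest.fixture()
    def raw_diff(diff_name):
        # Depends on diff_name; a parametrized override changes what gets loaded.
        return DIFFS[diff_name]

    def test_uses_default(raw_diff):
        assert raw_diff == DIFFS['default.diff']

    # Parametrizing a name that matches a fixture overrides that fixture
    # for this test only -- the same trick the tests above use.
    @pytest.mark.parametrize('diff_name', ['change-in-end.diff'])
    def test_uses_override(raw_diff):
        assert 'end' in raw_diff
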
@@ -1,151 +1,151 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 from rhodecode.model.db import Session, UserLog
23 from rhodecode.model.db import Session, UserLog
24 from rhodecode.lib import hooks_base, utils2
24 from rhodecode.lib import hooks_base, utils2
25
25
26
26
27 def test_post_push_truncates_commits(user_regular, repo_stub):
27 def test_post_push_truncates_commits(user_regular, repo_stub):
28 extras = {
28 extras = {
29 'ip': '127.0.0.1',
29 'ip': '127.0.0.1',
30 'username': user_regular.username,
30 'username': user_regular.username,
31 'user_id': user_regular.user_id,
31 'user_id': user_regular.user_id,
32 'action': 'push_local',
32 'action': 'push_local',
33 'repository': repo_stub.repo_name,
33 'repository': repo_stub.repo_name,
34 'scm': 'git',
34 'scm': 'git',
35 'config': '',
35 'config': '',
36 'server_url': 'http://example.com',
36 'server_url': 'http://example.com',
37 'make_lock': None,
37 'make_lock': None,
38 'user_agent': 'some-client',
38 'user_agent': 'some-client',
39 'locked_by': [None],
39 'locked_by': [None],
40 'commit_ids': ['abcde12345' * 4] * 30000,
40 'commit_ids': ['abcde12345' * 4] * 30000,
41 'hook_type': 'large_push_test_type',
41 'hook_type': 'large_push_test_type',
42 'is_shadow_repo': False,
42 'is_shadow_repo': False,
43 }
43 }
44 extras = utils2.AttributeDict(extras)
44 extras = utils2.AttributeDict(extras)
45
45
46 hooks_base.post_push(extras)
46 hooks_base.post_push(extras)
47
47
48 # Calculate appropriate action string here
48 # Calculate appropriate action string here
49 commit_ids = extras.commit_ids[:400]
49 commit_ids = extras.commit_ids[:400]
50
50
51 entry = UserLog.query().order_by('-user_log_id').first()
51 entry = UserLog.query().order_by('-user_log_id').first()
52 assert entry.action == 'user.push'
52 assert entry.action == 'user.push'
53 assert entry.action_data['commit_ids'] == commit_ids
53 assert entry.action_data['commit_ids'] == commit_ids
54 Session().delete(entry)
54 Session().delete(entry)
55 Session().commit()
55 Session().commit()
56
56
57
57
58 def assert_called_with_mock(callable_, expected_mock_name):
58 def assert_called_with_mock(callable_, expected_mock_name):
59 mock_obj = callable_.call_args[0][0]
59 mock_obj = callable_.call_args[0][0]
60 mock_name = mock_obj._mock_new_parent._mock_new_name
60 mock_name = mock_obj._mock_new_parent._mock_new_name
61 assert mock_name == expected_mock_name
61 assert mock_name == expected_mock_name
62
62
63
63
64 @pytest.fixture
64 @pytest.fixture()
65 def hook_extras(user_regular, repo_stub):
65 def hook_extras(user_regular, repo_stub):
66 extras = utils2.AttributeDict({
66 extras = utils2.AttributeDict({
67 'ip': '127.0.0.1',
67 'ip': '127.0.0.1',
68 'username': user_regular.username,
68 'username': user_regular.username,
69 'user_id': user_regular.user_id,
69 'user_id': user_regular.user_id,
70 'action': 'push',
70 'action': 'push',
71 'repository': repo_stub.repo_name,
71 'repository': repo_stub.repo_name,
72 'scm': '',
72 'scm': '',
73 'config': '',
73 'config': '',
74 'repo_store': '',
74 'repo_store': '',
75 'server_url': 'http://example.com',
75 'server_url': 'http://example.com',
76 'make_lock': None,
76 'make_lock': None,
77 'user_agent': 'some-client',
77 'user_agent': 'some-client',
78 'locked_by': [None],
78 'locked_by': [None],
79 'commit_ids': [],
79 'commit_ids': [],
80 'hook_type': 'test_type',
80 'hook_type': 'test_type',
81 'is_shadow_repo': False,
81 'is_shadow_repo': False,
82 })
82 })
83 return extras
83 return extras
84
84
85
85
86 @pytest.mark.parametrize('func, extension, event', [
86 @pytest.mark.parametrize('func, extension, event', [
87 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
87 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
88 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
88 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
89 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
89 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
90 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
90 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
91 ])
91 ])
92 def test_hooks_propagate(func, extension, event, hook_extras):
92 def test_hooks_propagate(func, extension, event, hook_extras):
93 """
93 """
94 Tests that our hook code propagates to rhodecode extensions and triggers
94 Tests that our hook code propagates to rhodecode extensions and triggers
95 the appropriate event.
95 the appropriate event.
96 """
96 """
97 class ExtensionMock(mock.Mock):
97 class ExtensionMock(mock.Mock):
98 @property
98 @property
99 def output(self):
99 def output(self):
100 return 'MOCK'
100 return 'MOCK'
101
101
102 extension_mock = ExtensionMock()
102 extension_mock = ExtensionMock()
103 events_mock = mock.Mock()
103 events_mock = mock.Mock()
104 patches = {
104 patches = {
105 'Repository': mock.Mock(),
105 'Repository': mock.Mock(),
106 'events': events_mock,
106 'events': events_mock,
107 extension: extension_mock,
107 extension: extension_mock,
108 }
108 }
109
109
110 # Clear shadow repo flag.
110 # Clear shadow repo flag.
111 hook_extras.is_shadow_repo = False
111 hook_extras.is_shadow_repo = False
112
112
113 # Execute hook function.
113 # Execute hook function.
114 with mock.patch.multiple(hooks_base, **patches):
114 with mock.patch.multiple(hooks_base, **patches):
115 func(hook_extras)
115 func(hook_extras)
116
116
117 # Assert that extensions are called and event was fired.
117 # Assert that extensions are called and event was fired.
118 extension_mock.called_once()
118 extension_mock.called_once()
119 assert_called_with_mock(events_mock.trigger, event)
119 assert_called_with_mock(events_mock.trigger, event)
120
120
121
121
122 @pytest.mark.parametrize('func, extension, event', [
122 @pytest.mark.parametrize('func, extension, event', [
123 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
123 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
124 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
124 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
125 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
125 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
126 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
126 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
127 ])
127 ])
128 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
128 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
129 """
129 """
130 If hooks are called by a request to a shadow repo we only want to run our
130 If hooks are called by a request to a shadow repo we only want to run our
131 internal hooks code but not external ones like rhodecode extensions or
131 internal hooks code but not external ones like rhodecode extensions or
132 trigger an event.
132 trigger an event.
133 """
133 """
134 extension_mock = mock.Mock()
134 extension_mock = mock.Mock()
135 events_mock = mock.Mock()
135 events_mock = mock.Mock()
136 patches = {
136 patches = {
137 'Repository': mock.Mock(),
137 'Repository': mock.Mock(),
138 'events': events_mock,
138 'events': events_mock,
139 extension: extension_mock,
139 extension: extension_mock,
140 }
140 }
141
141
142 # Set shadow repo flag.
142 # Set shadow repo flag.
143 hook_extras.is_shadow_repo = True
143 hook_extras.is_shadow_repo = True
144
144
145 # Execute hook function.
145 # Execute hook function.
146 with mock.patch.multiple(hooks_base, **patches):
146 with mock.patch.multiple(hooks_base, **patches):
147 func(hook_extras)
147 func(hook_extras)
148
148
149 # Assert that extensions are *not* called and event was *not* fired.
149 # Assert that extensions are *not* called and event was *not* fired.
150 assert not extension_mock.called
150 assert not extension_mock.called
151 assert not events_mock.trigger.called
151 assert not events_mock.trigger.called
@@ -1,332 +1,332 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 from StringIO import StringIO
23 from StringIO import StringIO
24
24
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib import hooks_daemon
28 from rhodecode.lib import hooks_daemon
29 from rhodecode.tests.utils import assert_message_in_log
29 from rhodecode.tests.utils import assert_message_in_log
30
30
31
31
32 class TestDummyHooksCallbackDaemon(object):
32 class TestDummyHooksCallbackDaemon(object):
33 def test_hooks_module_path_set_properly(self):
33 def test_hooks_module_path_set_properly(self):
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
36
36
37 def test_logs_entering_the_hook(self):
37 def test_logs_entering_the_hook(self):
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
40 with daemon as return_value:
40 with daemon as return_value:
41 log_mock.assert_called_once_with(
41 log_mock.assert_called_once_with(
42 'Running `%s` callback daemon', 'DummyHooksCallbackDaemon')
42 'Running `%s` callback daemon', 'DummyHooksCallbackDaemon')
43 assert return_value == daemon
43 assert return_value == daemon
44
44
45 def test_logs_exiting_the_hook(self):
45 def test_logs_exiting_the_hook(self):
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
48 with daemon:
48 with daemon:
49 pass
49 pass
50 log_mock.assert_called_with(
50 log_mock.assert_called_with(
51 'Exiting `%s` callback daemon', 'DummyHooksCallbackDaemon')
51 'Exiting `%s` callback daemon', 'DummyHooksCallbackDaemon')
52
52
53
53
54 class TestHooks(object):
54 class TestHooks(object):
55 def test_hooks_can_be_used_as_a_context_processor(self):
55 def test_hooks_can_be_used_as_a_context_processor(self):
56 hooks = hooks_daemon.Hooks()
56 hooks = hooks_daemon.Hooks()
57 with hooks as return_value:
57 with hooks as return_value:
58 pass
58 pass
59 assert hooks == return_value
59 assert hooks == return_value
60
60
61
61
62 class TestHooksHttpHandler(object):
62 class TestHooksHttpHandler(object):
63 def test_read_request_parses_method_name_and_arguments(self):
63 def test_read_request_parses_method_name_and_arguments(self):
64 data = {
64 data = {
65 'method': 'test',
65 'method': 'test',
66 'extras': {
66 'extras': {
67 'param1': 1,
67 'param1': 1,
68 'param2': 'a'
68 'param2': 'a'
69 }
69 }
70 }
70 }
71 request = self._generate_post_request(data)
71 request = self._generate_post_request(data)
72 hooks_patcher = mock.patch.object(
72 hooks_patcher = mock.patch.object(
73 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
73 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
74
74
75 with hooks_patcher as hooks_mock:
75 with hooks_patcher as hooks_mock:
76 MockServer(hooks_daemon.HooksHttpHandler, request)
76 MockServer(hooks_daemon.HooksHttpHandler, request)
77
77
78 hooks_mock.assert_called_once_with(data['extras'])
78 hooks_mock.assert_called_once_with(data['extras'])
79
79
80 def test_hooks_serialized_result_is_returned(self):
80 def test_hooks_serialized_result_is_returned(self):
81 request = self._generate_post_request({})
81 request = self._generate_post_request({})
82 rpc_method = 'test'
82 rpc_method = 'test'
83 hook_result = {
83 hook_result = {
84 'first': 'one',
84 'first': 'one',
85 'second': 2
85 'second': 2
86 }
86 }
87 read_patcher = mock.patch.object(
87 read_patcher = mock.patch.object(
88 hooks_daemon.HooksHttpHandler, '_read_request',
88 hooks_daemon.HooksHttpHandler, '_read_request',
89 return_value=(rpc_method, {}))
89 return_value=(rpc_method, {}))
90 hooks_patcher = mock.patch.object(
90 hooks_patcher = mock.patch.object(
91 hooks_daemon.Hooks, rpc_method, create=True,
91 hooks_daemon.Hooks, rpc_method, create=True,
92 return_value=hook_result)
92 return_value=hook_result)
93
93
94 with read_patcher, hooks_patcher:
94 with read_patcher, hooks_patcher:
95 server = MockServer(hooks_daemon.HooksHttpHandler, request)
95 server = MockServer(hooks_daemon.HooksHttpHandler, request)
96
96
97 expected_result = json.dumps(hook_result)
97 expected_result = json.dumps(hook_result)
98 assert server.request.output_stream.buflist[-1] == expected_result
98 assert server.request.output_stream.buflist[-1] == expected_result
99
99
100 def test_exception_is_returned_in_response(self):
100 def test_exception_is_returned_in_response(self):
101 request = self._generate_post_request({})
101 request = self._generate_post_request({})
102 rpc_method = 'test'
102 rpc_method = 'test'
103 read_patcher = mock.patch.object(
103 read_patcher = mock.patch.object(
104 hooks_daemon.HooksHttpHandler, '_read_request',
104 hooks_daemon.HooksHttpHandler, '_read_request',
105 return_value=(rpc_method, {}))
105 return_value=(rpc_method, {}))
106 hooks_patcher = mock.patch.object(
106 hooks_patcher = mock.patch.object(
107 hooks_daemon.Hooks, rpc_method, create=True,
107 hooks_daemon.Hooks, rpc_method, create=True,
108 side_effect=Exception('Test exception'))
108 side_effect=Exception('Test exception'))
109
109
110 with read_patcher, hooks_patcher:
110 with read_patcher, hooks_patcher:
111 server = MockServer(hooks_daemon.HooksHttpHandler, request)
111 server = MockServer(hooks_daemon.HooksHttpHandler, request)
112
112
113 org_exc = json.loads(server.request.output_stream.buflist[-1])
113 org_exc = json.loads(server.request.output_stream.buflist[-1])
114 expected_result = {
114 expected_result = {
115 'exception': 'Exception',
115 'exception': 'Exception',
116 'exception_traceback': org_exc['exception_traceback'],
116 'exception_traceback': org_exc['exception_traceback'],
117 'exception_args': ['Test exception']
117 'exception_args': ['Test exception']
118 }
118 }
119 assert org_exc == expected_result
119 assert org_exc == expected_result
120
120
121 def test_log_message_writes_to_debug_log(self, caplog):
121 def test_log_message_writes_to_debug_log(self, caplog):
122 ip_port = ('0.0.0.0', 8888)
122 ip_port = ('0.0.0.0', 8888)
123 handler = hooks_daemon.HooksHttpHandler(
123 handler = hooks_daemon.HooksHttpHandler(
124 MockRequest('POST /'), ip_port, mock.Mock())
124 MockRequest('POST /'), ip_port, mock.Mock())
125 fake_date = '1/Nov/2015 00:00:00'
125 fake_date = '1/Nov/2015 00:00:00'
126 date_patcher = mock.patch.object(
126 date_patcher = mock.patch.object(
127 handler, 'log_date_time_string', return_value=fake_date)
127 handler, 'log_date_time_string', return_value=fake_date)
128 with date_patcher, caplog.at_level(logging.DEBUG):
128 with date_patcher, caplog.at_level(logging.DEBUG):
129 handler.log_message('Some message %d, %s', 123, 'string')
129 handler.log_message('Some message %d, %s', 123, 'string')
130
130
131 expected_message = '{} - - [{}] Some message 123, string'.format(
131 expected_message = '{} - - [{}] Some message 123, string'.format(
132 ip_port[0], fake_date)
132 ip_port[0], fake_date)
133 assert_message_in_log(
133 assert_message_in_log(
134 caplog.records, expected_message,
134 caplog.records, expected_message,
135 levelno=logging.DEBUG, module='hooks_daemon')
135 levelno=logging.DEBUG, module='hooks_daemon')
136
136
137 def _generate_post_request(self, data):
137 def _generate_post_request(self, data):
138 payload = json.dumps(data)
138 payload = json.dumps(data)
139 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
139 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
140 len(payload), payload)
140 len(payload), payload)
141
141
142
142
143 class ThreadedHookCallbackDaemon(object):
143 class ThreadedHookCallbackDaemon(object):
144 def test_constructor_calls_prepare(self):
144 def test_constructor_calls_prepare(self):
145 prepare_daemon_patcher = mock.patch.object(
145 prepare_daemon_patcher = mock.patch.object(
146 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
146 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
147 with prepare_daemon_patcher as prepare_daemon_mock:
147 with prepare_daemon_patcher as prepare_daemon_mock:
148 hooks_daemon.ThreadedHookCallbackDaemon()
148 hooks_daemon.ThreadedHookCallbackDaemon()
149 prepare_daemon_mock.assert_called_once_with()
149 prepare_daemon_mock.assert_called_once_with()
150
150
151 def test_run_is_called_on_context_start(self):
151 def test_run_is_called_on_context_start(self):
152 patchers = mock.patch.multiple(
152 patchers = mock.patch.multiple(
153 hooks_daemon.ThreadedHookCallbackDaemon,
153 hooks_daemon.ThreadedHookCallbackDaemon,
154 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
154 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
155
155
156 with patchers as mocks:
156 with patchers as mocks:
157 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
157 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
158 with daemon as daemon_context:
158 with daemon as daemon_context:
159 pass
159 pass
160 mocks['_run'].assert_called_once_with()
160 mocks['_run'].assert_called_once_with()
161 assert daemon_context == daemon
161 assert daemon_context == daemon
162
162
163 def test_stop_is_called_on_context_exit(self):
163 def test_stop_is_called_on_context_exit(self):
164 patchers = mock.patch.multiple(
164 patchers = mock.patch.multiple(
165 hooks_daemon.ThreadedHookCallbackDaemon,
165 hooks_daemon.ThreadedHookCallbackDaemon,
166 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
166 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
167
167
168 with patchers as mocks:
168 with patchers as mocks:
169 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
169 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
170 with daemon as daemon_context:
170 with daemon as daemon_context:
171 assert mocks['_stop'].call_count == 0
171 assert mocks['_stop'].call_count == 0
172
172
173 mocks['_stop'].assert_called_once_with()
173 mocks['_stop'].assert_called_once_with()
174 assert daemon_context == daemon
174 assert daemon_context == daemon
175
175
176
176
177 class TestHttpHooksCallbackDaemon(object):
177 class TestHttpHooksCallbackDaemon(object):
178 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
178 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
179 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
179 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
180 daemon = hooks_daemon.HttpHooksCallbackDaemon()
180 daemon = hooks_daemon.HttpHooksCallbackDaemon()
181 assert daemon._daemon == tcp_server
181 assert daemon._daemon == tcp_server
182
182
183 _, port = tcp_server.server_address
183 _, port = tcp_server.server_address
184 expected_uri = '{}:{}'.format('127.0.0.1', port)
184 expected_uri = '{}:{}'.format('127.0.0.1', port)
185 msg = 'Preparing HTTP callback daemon at `{}` and ' \
185 msg = 'Preparing HTTP callback daemon at `{}` and ' \
186 'registering hook object'.format(expected_uri)
186 'registering hook object'.format(expected_uri)
187 assert_message_in_log(
187 assert_message_in_log(
188 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
188 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
189
189
190 def test_prepare_inits_hooks_uri_and_logs_it(
190 def test_prepare_inits_hooks_uri_and_logs_it(
191 self, tcp_server, caplog):
191 self, tcp_server, caplog):
192 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
192 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
193 daemon = hooks_daemon.HttpHooksCallbackDaemon()
193 daemon = hooks_daemon.HttpHooksCallbackDaemon()
194
194
195 _, port = tcp_server.server_address
195 _, port = tcp_server.server_address
196 expected_uri = '{}:{}'.format('127.0.0.1', port)
196 expected_uri = '{}:{}'.format('127.0.0.1', port)
197 assert daemon.hooks_uri == expected_uri
197 assert daemon.hooks_uri == expected_uri
198
198
199 msg = 'Preparing HTTP callback daemon at `{}` and ' \
199 msg = 'Preparing HTTP callback daemon at `{}` and ' \
200 'registering hook object'.format(expected_uri)
200 'registering hook object'.format(expected_uri)
201 assert_message_in_log(
201 assert_message_in_log(
202 caplog.records, msg,
202 caplog.records, msg,
203 levelno=logging.DEBUG, module='hooks_daemon')
203 levelno=logging.DEBUG, module='hooks_daemon')
204
204
205 def test_run_creates_a_thread(self, tcp_server):
205 def test_run_creates_a_thread(self, tcp_server):
206 thread = mock.Mock()
206 thread = mock.Mock()
207
207
208 with self._tcp_patcher(tcp_server):
208 with self._tcp_patcher(tcp_server):
209 daemon = hooks_daemon.HttpHooksCallbackDaemon()
209 daemon = hooks_daemon.HttpHooksCallbackDaemon()
210
210
211 with self._thread_patcher(thread) as thread_mock:
211 with self._thread_patcher(thread) as thread_mock:
212 daemon._run()
212 daemon._run()
213
213
214 thread_mock.assert_called_once_with(
214 thread_mock.assert_called_once_with(
215 target=tcp_server.serve_forever,
215 target=tcp_server.serve_forever,
216 kwargs={'poll_interval': daemon.POLL_INTERVAL})
216 kwargs={'poll_interval': daemon.POLL_INTERVAL})
217 assert thread.daemon is True
217 assert thread.daemon is True
218 thread.start.assert_called_once_with()
218 thread.start.assert_called_once_with()
219
219
220 def test_run_logs(self, tcp_server, caplog):
220 def test_run_logs(self, tcp_server, caplog):
221
221
222 with self._tcp_patcher(tcp_server):
222 with self._tcp_patcher(tcp_server):
223 daemon = hooks_daemon.HttpHooksCallbackDaemon()
223 daemon = hooks_daemon.HttpHooksCallbackDaemon()
224
224
225 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
225 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
226 daemon._run()
226 daemon._run()
227
227
228 assert_message_in_log(
228 assert_message_in_log(
229 caplog.records,
229 caplog.records,
230 'Running event loop of callback daemon in background thread',
230 'Running event loop of callback daemon in background thread',
231 levelno=logging.DEBUG, module='hooks_daemon')
231 levelno=logging.DEBUG, module='hooks_daemon')
232
232
233 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
233 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
234 thread = mock.Mock()
234 thread = mock.Mock()
235
235
236 with self._tcp_patcher(tcp_server):
236 with self._tcp_patcher(tcp_server):
237 daemon = hooks_daemon.HttpHooksCallbackDaemon()
237 daemon = hooks_daemon.HttpHooksCallbackDaemon()
238
238
239 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
239 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
240 with daemon:
240 with daemon:
241 assert daemon._daemon == tcp_server
241 assert daemon._daemon == tcp_server
242 assert daemon._callback_thread == thread
242 assert daemon._callback_thread == thread
243
243
244 assert daemon._daemon is None
244 assert daemon._daemon is None
245 assert daemon._callback_thread is None
245 assert daemon._callback_thread is None
246 tcp_server.shutdown.assert_called_with()
246 tcp_server.shutdown.assert_called_with()
247 thread.join.assert_called_once_with()
247 thread.join.assert_called_once_with()
248
248
249 assert_message_in_log(
249 assert_message_in_log(
250 caplog.records, 'Waiting for background thread to finish.',
250 caplog.records, 'Waiting for background thread to finish.',
251 levelno=logging.DEBUG, module='hooks_daemon')
251 levelno=logging.DEBUG, module='hooks_daemon')
252
252
253 def _tcp_patcher(self, tcp_server):
253 def _tcp_patcher(self, tcp_server):
254 return mock.patch.object(
254 return mock.patch.object(
255 hooks_daemon, 'TCPServer', return_value=tcp_server)
255 hooks_daemon, 'TCPServer', return_value=tcp_server)
256
256
257 def _thread_patcher(self, thread):
257 def _thread_patcher(self, thread):
258 return mock.patch.object(
258 return mock.patch.object(
259 hooks_daemon.threading, 'Thread', return_value=thread)
259 hooks_daemon.threading, 'Thread', return_value=thread)
260
260
261
261
262 class TestPrepareHooksDaemon(object):
262 class TestPrepareHooksDaemon(object):
263 @pytest.mark.parametrize('protocol', ('http',))
263 @pytest.mark.parametrize('protocol', ('http',))
264 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
264 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
265 self, protocol):
265 self, protocol):
266 expected_extras = {'extra1': 'value1'}
266 expected_extras = {'extra1': 'value1'}
267 callback, extras = hooks_daemon.prepare_callback_daemon(
267 callback, extras = hooks_daemon.prepare_callback_daemon(
268 expected_extras.copy(), protocol=protocol,
268 expected_extras.copy(), protocol=protocol,
269 host='127.0.0.1', use_direct_calls=True)
269 host='127.0.0.1', use_direct_calls=True)
270 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
270 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
271 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
271 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
272 expected_extras['time'] = extras['time']
272 expected_extras['time'] = extras['time']
273 assert 'extra1' in extras
273 assert 'extra1' in extras
274
274
275 @pytest.mark.parametrize('protocol, expected_class', (
275 @pytest.mark.parametrize('protocol, expected_class', (
276 ('http', hooks_daemon.HttpHooksCallbackDaemon),
276 ('http', hooks_daemon.HttpHooksCallbackDaemon),
277 ))
277 ))
278 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
278 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
279 self, protocol, expected_class):
279 self, protocol, expected_class):
280 expected_extras = {
280 expected_extras = {
281 'extra1': 'value1',
281 'extra1': 'value1',
282 'txn_id': 'txnid2',
282 'txn_id': 'txnid2',
283 'hooks_protocol': protocol.lower()
283 'hooks_protocol': protocol.lower()
284 }
284 }
285 callback, extras = hooks_daemon.prepare_callback_daemon(
285 callback, extras = hooks_daemon.prepare_callback_daemon(
286 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
286 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
287 use_direct_calls=False,
287 use_direct_calls=False,
288 txn_id='txnid2')
288 txn_id='txnid2')
289 assert isinstance(callback, expected_class)
289 assert isinstance(callback, expected_class)
290 extras.pop('hooks_uri')
290 extras.pop('hooks_uri')
291 expected_extras['time'] = extras['time']
291 expected_extras['time'] = extras['time']
292 assert extras == expected_extras
292 assert extras == expected_extras
293
293
294 @pytest.mark.parametrize('protocol', (
294 @pytest.mark.parametrize('protocol', (
295 'invalid',
295 'invalid',
296 'Http',
296 'Http',
297 'HTTP',
297 'HTTP',
298 ))
298 ))
299 def test_raises_on_invalid_protocol(self, protocol):
299 def test_raises_on_invalid_protocol(self, protocol):
300 expected_extras = {
300 expected_extras = {
301 'extra1': 'value1',
301 'extra1': 'value1',
302 'hooks_protocol': protocol.lower()
302 'hooks_protocol': protocol.lower()
303 }
303 }
304 with pytest.raises(Exception):
304 with pytest.raises(Exception):
305 callback, extras = hooks_daemon.prepare_callback_daemon(
305 callback, extras = hooks_daemon.prepare_callback_daemon(
306 expected_extras.copy(),
306 expected_extras.copy(),
307 protocol=protocol, host='127.0.0.1',
307 protocol=protocol, host='127.0.0.1',
308 use_direct_calls=False)
308 use_direct_calls=False)
309
309
310
310
311 class MockRequest(object):
311 class MockRequest(object):
312 def __init__(self, request):
312 def __init__(self, request):
313 self.request = request
313 self.request = request
314 self.input_stream = StringIO(b'{}'.format(self.request))
314 self.input_stream = StringIO(b'{}'.format(self.request))
315 self.output_stream = StringIO()
315 self.output_stream = StringIO()
316
316
317 def makefile(self, mode, *args, **kwargs):
317 def makefile(self, mode, *args, **kwargs):
318 return self.output_stream if mode == 'wb' else self.input_stream
318 return self.output_stream if mode == 'wb' else self.input_stream
319
319
320
320
321 class MockServer(object):
321 class MockServer(object):
322 def __init__(self, Handler, request):
322 def __init__(self, Handler, request):
323 ip_port = ('0.0.0.0', 8888)
323 ip_port = ('0.0.0.0', 8888)
324 self.request = MockRequest(request)
324 self.request = MockRequest(request)
325 self.handler = Handler(self.request, ip_port, self)
325 self.handler = Handler(self.request, ip_port, self)
326
326
327
327
328 @pytest.fixture
328 @pytest.fixture()
329 def tcp_server():
329 def tcp_server():
330 server = mock.Mock()
330 server = mock.Mock()
331 server.server_address = ('127.0.0.1', 8881)
331 server.server_address = ('127.0.0.1', 8881)
332 return server
332 return server
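
Note: the decorator edits visible in these hunks (`@pytest.fixture` becoming `@pytest.fixture()`) do not change behaviour; pytest accepts both the bare decorator and the no-argument call, and they register the same fixture. The call form is only a consistency choice that keeps the decorator shape stable if options such as `scope` are added later. A small illustrative sketch (the fixture names below are made up for the example):

    import pytest

    @pytest.fixture
    def bare_form():
        # Bare decorator: valid, registers a plain function-scoped fixture.
        return 'value'

    @pytest.fixture()
    def called_form():
        # Call form with no arguments: identical behaviour to the bare form.
        return 'value'

    @pytest.fixture(scope='module')
    def module_scoped():
        # The call form makes it trivial to add options like scope later.
        return 'value'

    def test_all_forms_resolve(bare_form, called_form, module_scoped):
        assert bare_form == called_form == module_scoped
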
@@ -1,107 +1,107 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pickle
21 import pickle
22 import pytest
22 import pytest
23
23
24 from sqlalchemy import Column, String, create_engine
24 from sqlalchemy import Column, String, create_engine
25 from sqlalchemy.orm import sessionmaker
25 from sqlalchemy.orm import sessionmaker
26 from sqlalchemy.ext.declarative import declarative_base
26 from sqlalchemy.ext.declarative import declarative_base
27
27
28 from rhodecode.lib.jsonalchemy import (
28 from rhodecode.lib.jsonalchemy import (
29 MutationDict, MutationList, MutationObj, JsonType)
29 MutationDict, MutationList, MutationObj, JsonType)
30
30
31
31
32 @pytest.fixture
32 @pytest.fixture()
33 def engine():
33 def engine():
34 return create_engine('sqlite://')
34 return create_engine('sqlite://')
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture()
38 def session(engine):
38 def session(engine):
39 return sessionmaker(bind=engine)()
39 return sessionmaker(bind=engine)()
40
40
41
41
42 def test_mutation_dict_is_picklable():
42 def test_mutation_dict_is_picklable():
43 mutation_dict = MutationDict({'key1': 'value1', 'key2': 'value2'})
43 mutation_dict = MutationDict({'key1': 'value1', 'key2': 'value2'})
44 dumped = pickle.dumps(mutation_dict)
44 dumped = pickle.dumps(mutation_dict)
45 loaded = pickle.loads(dumped)
45 loaded = pickle.loads(dumped)
46 assert loaded == mutation_dict
46 assert loaded == mutation_dict
47
47
48
48
49 def test_mutation_list_is_picklable():
49 def test_mutation_list_is_picklable():
50 mutation_list = MutationList(['a', 'b', 'c'])
50 mutation_list = MutationList(['a', 'b', 'c'])
51 dumped = pickle.dumps(mutation_list)
51 dumped = pickle.dumps(mutation_list)
52 loaded = pickle.loads(dumped)
52 loaded = pickle.loads(dumped)
53 assert loaded == mutation_list
53 assert loaded == mutation_list
54
54
55
55
56 def test_mutation_dict_with_lists_is_picklable():
56 def test_mutation_dict_with_lists_is_picklable():
57 mutation_dict = MutationDict({
57 mutation_dict = MutationDict({
58 'key': MutationList(['values', MutationDict({'key': 'value'})])
58 'key': MutationList(['values', MutationDict({'key': 'value'})])
59 })
59 })
60 dumped = pickle.dumps(mutation_dict)
60 dumped = pickle.dumps(mutation_dict)
61 loaded = pickle.loads(dumped)
61 loaded = pickle.loads(dumped)
62 assert loaded == mutation_dict
62 assert loaded == mutation_dict
63
63
64
64
65 def test_mutation_types_with_nullable(engine, session):
65 def test_mutation_types_with_nullable(engine, session):
66 # TODO: dan: ideally want to make this parametrized python => sql tests eg:
66 # TODO: dan: ideally want to make this parametrized python => sql tests eg:
67 # (MutationObj, 5) => '5'
67 # (MutationObj, 5) => '5'
68 # (MutationObj, {'a': 5}) => '{"a": 5}'
68 # (MutationObj, {'a': 5}) => '{"a": 5}'
69 # (MutationObj, None) => 'null' <- think about if None is 'null' or NULL
69 # (MutationObj, None) => 'null' <- think about if None is 'null' or NULL
70
70
71 Base = declarative_base()
71 Base = declarative_base()
72
72
73 class DummyModel(Base):
73 class DummyModel(Base):
74 __tablename__ = 'some_table'
74 __tablename__ = 'some_table'
75 name = Column(String, primary_key=True)
75 name = Column(String, primary_key=True)
76 json_list = Column(MutationList.as_mutable(JsonType('list')))
76 json_list = Column(MutationList.as_mutable(JsonType('list')))
77 json_dict = Column(MutationDict.as_mutable(JsonType('dict')))
77 json_dict = Column(MutationDict.as_mutable(JsonType('dict')))
78 json_obj = Column(MutationObj.as_mutable(JsonType()))
78 json_obj = Column(MutationObj.as_mutable(JsonType()))
79
79
80 Base.metadata.create_all(engine)
80 Base.metadata.create_all(engine)
81
81
82 obj_nulls = DummyModel(name='nulls')
82 obj_nulls = DummyModel(name='nulls')
83 obj_stuff = DummyModel(
83 obj_stuff = DummyModel(
84 name='stuff', json_list=[1,2,3], json_dict={'a': 5}, json_obj=9)
84 name='stuff', json_list=[1,2,3], json_dict={'a': 5}, json_obj=9)
85
85
86 session.add(obj_nulls)
86 session.add(obj_nulls)
87 session.add(obj_stuff)
87 session.add(obj_stuff)
88 session.commit()
88 session.commit()
89 session.expire_all()
89 session.expire_all()
90
90
91 assert engine.execute(
91 assert engine.execute(
92 "select * from some_table where name = 'nulls';").first() == (
92 "select * from some_table where name = 'nulls';").first() == (
93 (u'nulls', None, None, None)
93 (u'nulls', None, None, None)
94 )
94 )
95 ret_nulls = session.query(DummyModel).get('nulls')
95 ret_nulls = session.query(DummyModel).get('nulls')
96 assert ret_nulls.json_list == []
96 assert ret_nulls.json_list == []
97 assert ret_nulls.json_dict == {}
97 assert ret_nulls.json_dict == {}
98 assert ret_nulls.json_obj is None
98 assert ret_nulls.json_obj is None
99
99
100 assert engine.execute(
100 assert engine.execute(
101 "select * from some_table where name = 'stuff';").first() == (
101 "select * from some_table where name = 'stuff';").first() == (
102 (u'stuff', u'[1, 2, 3]', u'{"a": 5}', u'9')
102 (u'stuff', u'[1, 2, 3]', u'{"a": 5}', u'9')
103 )
103 )
104 ret_stuff = session.query(DummyModel).get('stuff')
104 ret_stuff = session.query(DummyModel).get('stuff')
105 assert ret_stuff.json_list == [1, 2, 3]
105 assert ret_stuff.json_list == [1, 2, 3]
106 assert ret_stuff.json_dict == {'a': 5}
106 assert ret_stuff.json_dict == {'a': 5}
107 assert ret_stuff.json_obj == 9
107 assert ret_stuff.json_obj == 9
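A sketch of the parametrized python => sql checks the TODO above asks for. It is not an existing RhodeCode test: the table name is hypothetical, and the expected serializations for list and dict values stored through a plain JsonType() column are assumptions extrapolated from the assertions in test_mutation_types_with_nullable. It reuses the imports and fixtures defined earlier in this file.

@pytest.mark.parametrize('py_value, sql_value', [
    (9, u'9'),
    ([1, 2, 3], u'[1, 2, 3]'),    # assumed: same JSON dump as the list column
    ({'a': 5}, u'{"a": 5}'),      # assumed: same JSON dump as the dict column
])
def test_json_obj_round_trip_sketch(engine, session, py_value, sql_value):
    Base = declarative_base()

    class ObjModel(Base):
        __tablename__ = 'obj_table'  # hypothetical table name
        name = Column(String, primary_key=True)
        json_obj = Column(MutationObj.as_mutable(JsonType()))

    Base.metadata.create_all(engine)

    session.add(ObjModel(name='row', json_obj=py_value))
    session.commit()

    stored = engine.execute(
        "select json_obj from obj_table where name = 'row';").first()
    assert stored == (sql_value,)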
@@ -1,807 +1,807 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.lib.utils2 import safe_str
24 from rhodecode.lib.utils2 import safe_str
25 from rhodecode.model.db import (
25 from rhodecode.model.db import (
26 RhodeCodeUi, RepoRhodeCodeUi, RhodeCodeSetting, RepoRhodeCodeSetting)
26 RhodeCodeUi, RepoRhodeCodeUi, RhodeCodeSetting, RepoRhodeCodeSetting)
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28 from rhodecode.model.settings import SettingsModel, SettingNotFound, UiSetting
28 from rhodecode.model.settings import SettingsModel, SettingNotFound, UiSetting
29
29
30
30
31 class TestRepoGetUiByKey(object):
31 class TestRepoGetUiByKey(object):
32 def test_ui_settings_are_returned_when_key_is_found(
32 def test_ui_settings_are_returned_when_key_is_found(
33 self, repo_stub, settings_util):
33 self, repo_stub, settings_util):
34 section = 'test section'
34 section = 'test section'
35 value = 'test value'
35 value = 'test value'
36
36
37 settings_util.create_repo_rhodecode_ui(
37 settings_util.create_repo_rhodecode_ui(
38 repo_stub, 'wrong section', 'wrong value')
38 repo_stub, 'wrong section', 'wrong value')
39 setting = settings_util.create_repo_rhodecode_ui(
39 setting = settings_util.create_repo_rhodecode_ui(
40 repo_stub, section, value)
40 repo_stub, section, value)
41 key = setting.ui_key
41 key = setting.ui_key
42
42
43 model = SettingsModel(repo=repo_stub.repo_name)
43 model = SettingsModel(repo=repo_stub.repo_name)
44 result = model.get_ui_by_key(key)
44 result = model.get_ui_by_key(key)
45 assert result.ui_value == value
45 assert result.ui_value == value
46 assert result.ui_section == section
46 assert result.ui_section == section
47 assert result.ui_active is True
47 assert result.ui_active is True
48
48
49 def test_none_is_returned_when_key_is_not_found(
49 def test_none_is_returned_when_key_is_not_found(
50 self, repo_stub, settings_util):
50 self, repo_stub, settings_util):
51 settings_util.create_repo_rhodecode_ui(
51 settings_util.create_repo_rhodecode_ui(
52 repo_stub, 'wrong section', 'wrong value')
52 repo_stub, 'wrong section', 'wrong value')
53
53
54 model = SettingsModel(repo=repo_stub.repo_name)
54 model = SettingsModel(repo=repo_stub.repo_name)
55 result = model.get_ui_by_key('abcde')
55 result = model.get_ui_by_key('abcde')
56 assert result is None
56 assert result is None
57
57
58
58
59 class TestGlobalGetUiByKey(object):
59 class TestGlobalGetUiByKey(object):
60 def test_ui_settings_are_returned_when_key_is_found(self, settings_util):
60 def test_ui_settings_are_returned_when_key_is_found(self, settings_util):
61 section = 'test section'
61 section = 'test section'
62 value = 'test value'
62 value = 'test value'
63
63
64 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
64 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
65 setting = settings_util.create_rhodecode_ui(section, value)
65 setting = settings_util.create_rhodecode_ui(section, value)
66 key = setting.ui_key
66 key = setting.ui_key
67
67
68 model = SettingsModel()
68 model = SettingsModel()
69 result = model.get_ui_by_key(key)
69 result = model.get_ui_by_key(key)
70 assert result.ui_value == value
70 assert result.ui_value == value
71 assert result.ui_section == section
71 assert result.ui_section == section
72 assert result.ui_active is True
72 assert result.ui_active is True
73
73
74 def test_none_is_returned_when_key_is_not_found(self, settings_util):
74 def test_none_is_returned_when_key_is_not_found(self, settings_util):
75 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
75 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
76 model = SettingsModel()
76 model = SettingsModel()
77 result = model.get_ui_by_key('abcde')
77 result = model.get_ui_by_key('abcde')
78 assert result is None
78 assert result is None
79
79
80
80
81 class TestRepoGetUiBySection(object):
81 class TestRepoGetUiBySection(object):
82 def test_ui_settings_are_returned_when_section_is_found(
82 def test_ui_settings_are_returned_when_section_is_found(
83 self, repo_stub, settings_util):
83 self, repo_stub, settings_util):
84 section = 'test section'
84 section = 'test section'
85 values = ['test value 1', 'test value 2']
85 values = ['test value 1', 'test value 2']
86
86
87 expected_pairs = []
87 expected_pairs = []
88 for value in values:
88 for value in values:
89 setting = settings_util.create_repo_rhodecode_ui(
89 setting = settings_util.create_repo_rhodecode_ui(
90 repo_stub, section, value)
90 repo_stub, section, value)
91 expected_pairs.append((setting.ui_key, value))
91 expected_pairs.append((setting.ui_key, value))
92
92
93 model = SettingsModel(repo=repo_stub.repo_name)
93 model = SettingsModel(repo=repo_stub.repo_name)
94 result = model.get_ui_by_section(section)
94 result = model.get_ui_by_section(section)
95 result_pairs = [(r.ui_key, r.ui_value) for r in result]
95 result_pairs = [(r.ui_key, r.ui_value) for r in result]
96 assert sorted(result_pairs) == sorted(expected_pairs)
96 assert sorted(result_pairs) == sorted(expected_pairs)
97
97
98 def test_empty_list_is_returned_when_section_is_not_found(
98 def test_empty_list_is_returned_when_section_is_not_found(
99 self, repo_stub, settings_util):
99 self, repo_stub, settings_util):
100 settings_util.create_repo_rhodecode_ui(
100 settings_util.create_repo_rhodecode_ui(
101 repo_stub, 'wrong section', 'wrong value')
101 repo_stub, 'wrong section', 'wrong value')
102
102
103 model = SettingsModel(repo=repo_stub.repo_name)
103 model = SettingsModel(repo=repo_stub.repo_name)
104 result = model.get_ui_by_section('correct section')
104 result = model.get_ui_by_section('correct section')
105 assert result == []
105 assert result == []
106
106
107
107
108 class TestGlobalGetUiBySection(object):
108 class TestGlobalGetUiBySection(object):
109 def test_ui_settings_are_returned_when_section_is_found(
109 def test_ui_settings_are_returned_when_section_is_found(
110 self, settings_util):
110 self, settings_util):
111 section = 'test section'
111 section = 'test section'
112 values = ['test value 1', 'test value 2']
112 values = ['test value 1', 'test value 2']
113
113
114 expected_pairs = []
114 expected_pairs = []
115 for value in values:
115 for value in values:
116 setting = settings_util.create_rhodecode_ui(section, value)
116 setting = settings_util.create_rhodecode_ui(section, value)
117 expected_pairs.append((setting.ui_key, value))
117 expected_pairs.append((setting.ui_key, value))
118
118
119 model = SettingsModel()
119 model = SettingsModel()
120 result = model.get_ui_by_section(section)
120 result = model.get_ui_by_section(section)
121 result_pairs = [(r.ui_key, r.ui_value) for r in result]
121 result_pairs = [(r.ui_key, r.ui_value) for r in result]
122 assert sorted(result_pairs) == sorted(expected_pairs)
122 assert sorted(result_pairs) == sorted(expected_pairs)
123
123
124 def test_empty_list_is_returned_when_section_is_not_found(
124 def test_empty_list_is_returned_when_section_is_not_found(
125 self, settings_util):
125 self, settings_util):
126 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
126 settings_util.create_rhodecode_ui('wrong section', 'wrong value')
127
127
128 model = SettingsModel()
128 model = SettingsModel()
129 result = model.get_ui_by_section('correct section')
129 result = model.get_ui_by_section('correct section')
130 assert result == []
130 assert result == []
131
131
132
132
133 class TestRepoGetUiBySectionAndKey(object):
133 class TestRepoGetUiBySectionAndKey(object):
134 def test_ui_settings_are_returned_when_section_and_key_are_found(
134 def test_ui_settings_are_returned_when_section_and_key_are_found(
135 self, repo_stub, settings_util):
135 self, repo_stub, settings_util):
136 section = 'test section'
136 section = 'test section'
137 value = 'test value'
137 value = 'test value'
138 key = 'test key'
138 key = 'test key'
139
139
140 settings_util.create_rhodecode_ui(
140 settings_util.create_rhodecode_ui(
141 'wrong section', 'wrong value', key='wrong key')
141 'wrong section', 'wrong value', key='wrong key')
142 setting = settings_util.create_repo_rhodecode_ui(
142 setting = settings_util.create_repo_rhodecode_ui(
143 repo_stub, section, value, key=key)
143 repo_stub, section, value, key=key)
144 key = setting.ui_key
144 key = setting.ui_key
145
145
146 model = SettingsModel(repo=repo_stub.repo_name)
146 model = SettingsModel(repo=repo_stub.repo_name)
147 result = model.get_ui_by_section_and_key(section, key)
147 result = model.get_ui_by_section_and_key(section, key)
148 assert result.ui_value == value
148 assert result.ui_value == value
149 assert result.ui_section == section
149 assert result.ui_section == section
150 assert result.ui_active is True
150 assert result.ui_active is True
151
151
152 def test_none_is_returned_when_key_section_pair_is_not_found(
152 def test_none_is_returned_when_key_section_pair_is_not_found(
153 self, repo_stub, settings_util):
153 self, repo_stub, settings_util):
154 settings_util.create_repo_rhodecode_ui(
154 settings_util.create_repo_rhodecode_ui(
155 repo_stub, 'section', 'wrong value', key='wrong key')
155 repo_stub, 'section', 'wrong value', key='wrong key')
156
156
157 model = SettingsModel(repo=repo_stub.repo_name)
157 model = SettingsModel(repo=repo_stub.repo_name)
158 result = model.get_ui_by_section_and_key('section', 'test key')
158 result = model.get_ui_by_section_and_key('section', 'test key')
159 assert result is None
159 assert result is None
160
160
161
161
162 class TestGlobalGetUiBySectionAndKey(object):
162 class TestGlobalGetUiBySectionAndKey(object):
163 def test_ui_settings_are_returned_when_section_and_key_are_found(
163 def test_ui_settings_are_returned_when_section_and_key_are_found(
164 self, settings_util):
164 self, settings_util):
165 section = 'test section'
165 section = 'test section'
166 value = 'test value'
166 value = 'test value'
167 key = 'test key'
167 key = 'test key'
168
168
169 settings_util.create_rhodecode_ui(
169 settings_util.create_rhodecode_ui(
170 'wrong section', 'wrong value', key='wrong key')
170 'wrong section', 'wrong value', key='wrong key')
171 setting = settings_util.create_rhodecode_ui(section, value, key=key)
171 setting = settings_util.create_rhodecode_ui(section, value, key=key)
172 key = setting.ui_key
172 key = setting.ui_key
173
173
174 model = SettingsModel()
174 model = SettingsModel()
175 result = model.get_ui_by_section_and_key(section, key)
175 result = model.get_ui_by_section_and_key(section, key)
176 assert result.ui_value == value
176 assert result.ui_value == value
177 assert result.ui_section == section
177 assert result.ui_section == section
178 assert result.ui_active is True
178 assert result.ui_active is True
179
179
180 def test_none_is_returned_when_key_section_pair_is_not_found(
180 def test_none_is_returned_when_key_section_pair_is_not_found(
181 self, settings_util):
181 self, settings_util):
182 settings_util.create_rhodecode_ui(
182 settings_util.create_rhodecode_ui(
183 'section', 'wrong value', key='wrong key')
183 'section', 'wrong value', key='wrong key')
184 model = SettingsModel()
184 model = SettingsModel()
185 result = model.get_ui_by_section_and_key('section', 'test key')
185 result = model.get_ui_by_section_and_key('section', 'test key')
186 assert result is None
186 assert result is None
187
187
188
188
189 class TestRepoGetUi(object):
189 class TestRepoGetUi(object):
190 def test_non_empty_list_is_returned_when_ui_settings_found(
190 def test_non_empty_list_is_returned_when_ui_settings_found(
191 self, repo_stub, settings_util, fake_ui_values):
191 self, repo_stub, settings_util, fake_ui_values):
192 for ui in fake_ui_values:
192 for ui in fake_ui_values:
193 settings_util.create_repo_rhodecode_ui(
193 settings_util.create_repo_rhodecode_ui(
194 repo_stub, ui.section, ui.value, key=ui.key)
194 repo_stub, ui.section, ui.value, key=ui.key)
195 # Create a few global settings to check that only repo ones are
195 # Create a few global settings to check that only repo ones are
196 # displayed
196 # displayed
197 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
197 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
198
198
199 model = SettingsModel(repo=repo_stub.repo_name)
199 model = SettingsModel(repo=repo_stub.repo_name)
200 result = model.get_ui()
200 result = model.get_ui()
201 assert sorted(result) == sorted(fake_ui_values)
201 assert sorted(result) == sorted(fake_ui_values)
202
202
203 def test_settings_filtered_by_section(
203 def test_settings_filtered_by_section(
204 self, repo_stub, settings_util, fake_ui_values):
204 self, repo_stub, settings_util, fake_ui_values):
205 for ui in fake_ui_values:
205 for ui in fake_ui_values:
206 settings_util.create_repo_rhodecode_ui(
206 settings_util.create_repo_rhodecode_ui(
207 repo_stub, ui.section, ui.value, key=ui.key)
207 repo_stub, ui.section, ui.value, key=ui.key)
208
208
209 model = SettingsModel(repo=repo_stub.repo_name)
209 model = SettingsModel(repo=repo_stub.repo_name)
210 result = model.get_ui(section=fake_ui_values[0].section)
210 result = model.get_ui(section=fake_ui_values[0].section)
211 expected_result = [
211 expected_result = [
212 s for s in fake_ui_values
212 s for s in fake_ui_values
213 if s.section == fake_ui_values[0].section]
213 if s.section == fake_ui_values[0].section]
214 assert sorted(result) == sorted(expected_result)
214 assert sorted(result) == sorted(expected_result)
215
215
216 def test_settings_filtered_by_key(
216 def test_settings_filtered_by_key(
217 self, repo_stub, settings_util, fake_ui_values):
217 self, repo_stub, settings_util, fake_ui_values):
218 for ui in fake_ui_values:
218 for ui in fake_ui_values:
219 settings_util.create_repo_rhodecode_ui(
219 settings_util.create_repo_rhodecode_ui(
220 repo_stub, ui.section, ui.value, key=ui.key)
220 repo_stub, ui.section, ui.value, key=ui.key)
221
221
222 model = SettingsModel(repo=repo_stub.repo_name)
222 model = SettingsModel(repo=repo_stub.repo_name)
223 result = model.get_ui(key=fake_ui_values[0].key)
223 result = model.get_ui(key=fake_ui_values[0].key)
224 expected_result = [
224 expected_result = [
225 s for s in fake_ui_values if s.key == fake_ui_values[0].key]
225 s for s in fake_ui_values if s.key == fake_ui_values[0].key]
226 assert sorted(result) == sorted(expected_result)
226 assert sorted(result) == sorted(expected_result)
227
227
228 def test_empty_list_is_returned_when_ui_settings_are_not_found(
228 def test_empty_list_is_returned_when_ui_settings_are_not_found(
229 self, repo_stub, settings_util):
229 self, repo_stub, settings_util):
230 for i in range(10):
230 for i in range(10):
231 settings_util.create_rhodecode_ui(
231 settings_util.create_rhodecode_ui(
232 'section{}'.format(i), 'value{}'.format(i),
232 'section{}'.format(i), 'value{}'.format(i),
233 key='key{}'.format(i), active=True)
233 key='key{}'.format(i), active=True)
234
234
235 model = SettingsModel(repo=repo_stub.repo_name)
235 model = SettingsModel(repo=repo_stub.repo_name)
236 result = model.get_ui()
236 result = model.get_ui()
237 assert result == []
237 assert result == []
238
238
239
239
240 class TestGlobalGetUi(object):
240 class TestGlobalGetUi(object):
241 def test_non_empty_list_is_returned_when_ui_settings_found(
241 def test_non_empty_list_is_returned_when_ui_settings_found(
242 self, backend_stub, settings_util, fake_ui_values):
242 self, backend_stub, settings_util, fake_ui_values):
243 repo = backend_stub.create_repo()
243 repo = backend_stub.create_repo()
244 for ui in fake_ui_values:
244 for ui in fake_ui_values:
245 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
245 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
246 # Create a few repo settings to check that only global ones are
246 # Create a few repo settings to check that only global ones are
247 # displayed
247 # displayed
248 settings_util.create_repo_rhodecode_ui(
248 settings_util.create_repo_rhodecode_ui(
249 repo, ui.section, ui.value, key=ui.key)
249 repo, ui.section, ui.value, key=ui.key)
250
250
251 model = SettingsModel()
251 model = SettingsModel()
252 result = model.get_ui()
252 result = model.get_ui()
253 for ui in fake_ui_values:
253 for ui in fake_ui_values:
254 assert ui in result
254 assert ui in result
255
255
256 def test_settings_filtered_by_key(self, settings_util, fake_ui_values):
256 def test_settings_filtered_by_key(self, settings_util, fake_ui_values):
257 for ui in fake_ui_values:
257 for ui in fake_ui_values:
258 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
258 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
259 expected_result = [
259 expected_result = [
260 s for s in fake_ui_values if s.key == fake_ui_values[0].key]
260 s for s in fake_ui_values if s.key == fake_ui_values[0].key]
261
261
262 model = SettingsModel()
262 model = SettingsModel()
263 result = model.get_ui(key=fake_ui_values[0].key)
263 result = model.get_ui(key=fake_ui_values[0].key)
264 assert sorted(result) == sorted(expected_result)
264 assert sorted(result) == sorted(expected_result)
265
265
266 def test_settings_filtered_by_section(self, settings_util, fake_ui_values):
266 def test_settings_filtered_by_section(self, settings_util, fake_ui_values):
267 for ui in fake_ui_values:
267 for ui in fake_ui_values:
268 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
268 settings_util.create_rhodecode_ui(ui.section, ui.value, key=ui.key)
269 expected_result = [
269 expected_result = [
270 s for s in fake_ui_values
270 s for s in fake_ui_values
271 if s.section == fake_ui_values[0].section]
271 if s.section == fake_ui_values[0].section]
272
272
273 model = SettingsModel()
273 model = SettingsModel()
274 result = model.get_ui(section=fake_ui_values[0].section)
274 result = model.get_ui(section=fake_ui_values[0].section)
275 assert sorted(result) == sorted(expected_result)
275 assert sorted(result) == sorted(expected_result)
276
276
277 def test_repo_settings_are_not_displayed(
277 def test_repo_settings_are_not_displayed(
278 self, backend_stub, settings_util, fake_ui_values):
278 self, backend_stub, settings_util, fake_ui_values):
279 repo = backend_stub.create_repo()
279 repo = backend_stub.create_repo()
280 for ui in fake_ui_values:
280 for ui in fake_ui_values:
281 settings_util.create_repo_rhodecode_ui(
281 settings_util.create_repo_rhodecode_ui(
282 repo, ui.section, ui.value, key=ui.key, active=ui.active)
282 repo, ui.section, ui.value, key=ui.key, active=ui.active)
283
283
284 model = SettingsModel()
284 model = SettingsModel()
285 result = model.get_ui()
285 result = model.get_ui()
286 for ui in fake_ui_values:
286 for ui in fake_ui_values:
287 assert ui not in result
287 assert ui not in result
288
288
289
289
290 class TestRepoGetBuiltInHooks(object):
290 class TestRepoGetBuiltInHooks(object):
291 def test_only_builtin_hooks_are_returned(self, repo_stub, settings_util):
291 def test_only_builtin_hooks_are_returned(self, repo_stub, settings_util):
292 section = 'hooks'
292 section = 'hooks'
293 valid_keys = SettingsModel.BUILTIN_HOOKS
293 valid_keys = SettingsModel.BUILTIN_HOOKS
294 invalid_keys = ('fake_hook', )
294 invalid_keys = ('fake_hook', )
295 keys = valid_keys + invalid_keys
295 keys = valid_keys + invalid_keys
296
296
297 for key in keys:
297 for key in keys:
298 settings_util.create_repo_rhodecode_ui(
298 settings_util.create_repo_rhodecode_ui(
299 repo_stub, section, 'test value', key=key)
299 repo_stub, section, 'test value', key=key)
300
300
301 model = SettingsModel(repo=repo_stub.repo_name)
301 model = SettingsModel(repo=repo_stub.repo_name)
302 result = model.get_builtin_hooks()
302 result = model.get_builtin_hooks()
303
303
304 assert len(result) == len(valid_keys)
304 assert len(result) == len(valid_keys)
305 for entry in result:
305 for entry in result:
306 assert entry.ui_key in valid_keys
306 assert entry.ui_key in valid_keys
307
307
308
308
309 class TestGlobalGetBuiltInHooks(object):
309 class TestGlobalGetBuiltInHooks(object):
310 def test_only_builtin_hooks_are_returned(self, settings_util):
310 def test_only_builtin_hooks_are_returned(self, settings_util):
311 section = 'hooks'
311 section = 'hooks'
312 valid_keys = ('valid_key1', 'valid_key2')
312 valid_keys = ('valid_key1', 'valid_key2')
313 invalid_keys = ('fake_hook', )
313 invalid_keys = ('fake_hook', )
314 keys = valid_keys + invalid_keys
314 keys = valid_keys + invalid_keys
315
315
316 for key in keys:
316 for key in keys:
317 settings_util.create_rhodecode_ui(section, 'test value', key=key)
317 settings_util.create_rhodecode_ui(section, 'test value', key=key)
318
318
319 model = SettingsModel()
319 model = SettingsModel()
320 with mock.patch.object(model, 'BUILTIN_HOOKS', valid_keys):
320 with mock.patch.object(model, 'BUILTIN_HOOKS', valid_keys):
321 result = model.get_builtin_hooks()
321 result = model.get_builtin_hooks()
322
322
323 assert len(result) == len(valid_keys)
323 assert len(result) == len(valid_keys)
324 for entry in result:
324 for entry in result:
325 assert entry.ui_key in valid_keys
325 assert entry.ui_key in valid_keys
326
326
327
327
328 class TestRepoGetCustomHooks(object):
328 class TestRepoGetCustomHooks(object):
329 def test_only_custom_hooks_are_returned(self, repo_stub, settings_util):
329 def test_only_custom_hooks_are_returned(self, repo_stub, settings_util):
330 section = 'hooks'
330 section = 'hooks'
331 valid_keys = ('custom', )
331 valid_keys = ('custom', )
332 invalid_keys = SettingsModel.BUILTIN_HOOKS
332 invalid_keys = SettingsModel.BUILTIN_HOOKS
333 keys = valid_keys + invalid_keys
333 keys = valid_keys + invalid_keys
334
334
335 for key in keys:
335 for key in keys:
336 settings_util.create_repo_rhodecode_ui(
336 settings_util.create_repo_rhodecode_ui(
337 repo_stub, section, 'test value', key=key)
337 repo_stub, section, 'test value', key=key)
338
338
339 model = SettingsModel(repo=repo_stub.repo_name)
339 model = SettingsModel(repo=repo_stub.repo_name)
340 result = model.get_custom_hooks()
340 result = model.get_custom_hooks()
341
341
342 assert len(result) == len(valid_keys)
342 assert len(result) == len(valid_keys)
343 for entry in result:
343 for entry in result:
344 assert entry.ui_key in valid_keys
344 assert entry.ui_key in valid_keys
345
345
346
346
347 class TestGlobalGetCustomHooks(object):
347 class TestGlobalGetCustomHooks(object):
348 def test_only_custom_hooks_are_returned(self, settings_util):
348 def test_only_custom_hooks_are_returned(self, settings_util):
349 section = 'hooks'
349 section = 'hooks'
350 valid_keys = ('valid_key1', 'valid_key2')
350 valid_keys = ('valid_key1', 'valid_key2')
351 invalid_keys = ('fake_hook', )
351 invalid_keys = ('fake_hook', )
352 keys = valid_keys + invalid_keys
352 keys = valid_keys + invalid_keys
353
353
354 for key in keys:
354 for key in keys:
355 settings_util.create_rhodecode_ui(section, 'test value', key=key)
355 settings_util.create_rhodecode_ui(section, 'test value', key=key)
356
356
357 model = SettingsModel()
357 model = SettingsModel()
358 with mock.patch.object(model, 'BUILTIN_HOOKS', invalid_keys):
358 with mock.patch.object(model, 'BUILTIN_HOOKS', invalid_keys):
359 result = model.get_custom_hooks()
359 result = model.get_custom_hooks()
360 for entry in result:
360 for entry in result:
361 assert entry.ui_key not in invalid_keys
361 assert entry.ui_key not in invalid_keys
362
362
363
363
364 class TestRepoCreateUiSectionValue(object):
364 class TestRepoCreateUiSectionValue(object):
365 @pytest.mark.parametrize("additional_kwargs", [
365 @pytest.mark.parametrize("additional_kwargs", [
366 {'key': 'abcde'},
366 {'key': 'abcde'},
367 {'active': False},
367 {'active': False},
368 {}
368 {}
369 ])
369 ])
370 def test_ui_section_value_is_created(
370 def test_ui_section_value_is_created(
371 self, repo_stub, additional_kwargs):
371 self, repo_stub, additional_kwargs):
372 model = SettingsModel(repo=repo_stub.repo_name)
372 model = SettingsModel(repo=repo_stub.repo_name)
373 section = 'test section'
373 section = 'test section'
374 value = 'test value'
374 value = 'test value'
375 result = model.create_ui_section_value(section, value)
375 result = model.create_ui_section_value(section, value)
376 key = result.ui_key
376 key = result.ui_key
377 Session().commit()
377 Session().commit()
378
378
379 setting = model.get_ui_by_key(key)
379 setting = model.get_ui_by_key(key)
380 try:
380 try:
381 assert setting == result
381 assert setting == result
382 assert isinstance(setting, RepoRhodeCodeUi)
382 assert isinstance(setting, RepoRhodeCodeUi)
383 finally:
383 finally:
384 Session().delete(result)
384 Session().delete(result)
385 Session().commit()
385 Session().commit()
386
386
387
387
388 class TestGlobalCreateUiSectionValue(object):
388 class TestGlobalCreateUiSectionValue(object):
389 @pytest.mark.parametrize("additional_kwargs", [
389 @pytest.mark.parametrize("additional_kwargs", [
390 {'key': 'abcde'},
390 {'key': 'abcde'},
391 {'active': False},
391 {'active': False},
392 {}
392 {}
393 ])
393 ])
394 def test_ui_section_value_is_created_with_autogenerated_key(
394 def test_ui_section_value_is_created_with_autogenerated_key(
395 self, backend_stub, additional_kwargs):
395 self, backend_stub, additional_kwargs):
396 model = SettingsModel()
396 model = SettingsModel()
397 section = 'test section'
397 section = 'test section'
398 value = 'test value'
398 value = 'test value'
399 result = model.create_ui_section_value(
399 result = model.create_ui_section_value(
400 section, value, **additional_kwargs)
400 section, value, **additional_kwargs)
401 key = result.ui_key
401 key = result.ui_key
402 Session().commit()
402 Session().commit()
403
403
404 setting = model.get_ui_by_key(key)
404 setting = model.get_ui_by_key(key)
405 try:
405 try:
406 assert setting == result
406 assert setting == result
407 assert isinstance(setting, RhodeCodeUi)
407 assert isinstance(setting, RhodeCodeUi)
408 finally:
408 finally:
409 Session().delete(result)
409 Session().delete(result)
410 Session().commit()
410 Session().commit()
411
411
412
412
413 class TestRepoCreateOrUpdateHook(object):
413 class TestRepoCreateOrUpdateHook(object):
414 def test_hook_created(self, repo_stub):
414 def test_hook_created(self, repo_stub):
415 model = SettingsModel(repo=repo_stub.repo_name)
415 model = SettingsModel(repo=repo_stub.repo_name)
416 key = 'test_key'
416 key = 'test_key'
417 value = 'test value'
417 value = 'test value'
418 result = model.create_or_update_hook(key, value)
418 result = model.create_or_update_hook(key, value)
419 Session().commit()
419 Session().commit()
420
420
421 setting = model.get_ui_by_section_and_key('hooks', key)
421 setting = model.get_ui_by_section_and_key('hooks', key)
422 try:
422 try:
423 assert setting == result
423 assert setting == result
424 assert isinstance(setting, RepoRhodeCodeUi)
424 assert isinstance(setting, RepoRhodeCodeUi)
425 finally:
425 finally:
426 Session().delete(result)
426 Session().delete(result)
427 Session().commit()
427 Session().commit()
428
428
429 def test_hook_updated(self, repo_stub, settings_util):
429 def test_hook_updated(self, repo_stub, settings_util):
430 section = 'hooks'
430 section = 'hooks'
431 key = 'test_key'
431 key = 'test_key'
432
432
433 settings_util.create_repo_rhodecode_ui(
433 settings_util.create_repo_rhodecode_ui(
434 repo_stub, section, 'old value', key=key)
434 repo_stub, section, 'old value', key=key)
435
435
436 model = SettingsModel(repo=repo_stub.repo_name)
436 model = SettingsModel(repo=repo_stub.repo_name)
437 value = 'test value'
437 value = 'test value'
438 model.create_or_update_hook(key, value)
438 model.create_or_update_hook(key, value)
439 Session().commit()
439 Session().commit()
440
440
441 setting = model.get_ui_by_section_and_key('hooks', key)
441 setting = model.get_ui_by_section_and_key('hooks', key)
442 assert setting.ui_value == value
442 assert setting.ui_value == value
443
443
444
444
445 class TestGlobalCreateOrUpdateHook(object):
445 class TestGlobalCreateOrUpdateHook(object):
446 def test_hook_created(self):
446 def test_hook_created(self):
447 model = SettingsModel()
447 model = SettingsModel()
448 key = 'test_key'
448 key = 'test_key'
449 value = 'test value'
449 value = 'test value'
450 result = model.create_or_update_hook(key, value)
450 result = model.create_or_update_hook(key, value)
451 Session().commit()
451 Session().commit()
452
452
453 setting = model.get_ui_by_section_and_key('hooks', key)
453 setting = model.get_ui_by_section_and_key('hooks', key)
454 try:
454 try:
455 assert setting == result
455 assert setting == result
456 assert isinstance(setting, RhodeCodeUi)
456 assert isinstance(setting, RhodeCodeUi)
457 finally:
457 finally:
458 Session().delete(result)
458 Session().delete(result)
459 Session().commit()
459 Session().commit()
460
460
461 def test_hook_updated(self, settings_util):
461 def test_hook_updated(self, settings_util):
462 section = 'hooks'
462 section = 'hooks'
463 key = 'test_key'
463 key = 'test_key'
464
464
465 settings_util.create_rhodecode_ui(section, 'old value', key=key)
465 settings_util.create_rhodecode_ui(section, 'old value', key=key)
466
466
467 model = SettingsModel()
467 model = SettingsModel()
468 value = 'test value'
468 value = 'test value'
469 model.create_or_update_hook(key, value)
469 model.create_or_update_hook(key, value)
470 Session().commit()
470 Session().commit()
471
471
472 setting = model.get_ui_by_section_and_key('hooks', key)
472 setting = model.get_ui_by_section_and_key('hooks', key)
473 assert setting.ui_value == value
473 assert setting.ui_value == value
474
474
475
475
476 class TestDeleteUiValue(object):
476 class TestDeleteUiValue(object):
477 def test_delete_ui_when_repo_is_set(self, repo_stub, settings_util):
477 def test_delete_ui_when_repo_is_set(self, repo_stub, settings_util):
478 model = SettingsModel(repo=repo_stub.repo_name)
478 model = SettingsModel(repo=repo_stub.repo_name)
479 result = settings_util.create_repo_rhodecode_ui(
479 result = settings_util.create_repo_rhodecode_ui(
480 repo_stub, 'section', None, cleanup=False)
480 repo_stub, 'section', None, cleanup=False)
481
481
482 key = result.ui_key
482 key = result.ui_key
483 model.delete_ui(result.ui_id)
483 model.delete_ui(result.ui_id)
484 Session().commit()
484 Session().commit()
485
485
486 setting = model.get_ui_by_key(key)
486 setting = model.get_ui_by_key(key)
487 assert setting is None
487 assert setting is None
488
488
489 @pytest.mark.parametrize('id_', (None, 123))
489 @pytest.mark.parametrize('id_', (None, 123))
490 def test_raises_exception_when_id_is_not_specified(self, id_):
490 def test_raises_exception_when_id_is_not_specified(self, id_):
491 model = SettingsModel()
491 model = SettingsModel()
492 with pytest.raises(SettingNotFound) as exc_info:
492 with pytest.raises(SettingNotFound) as exc_info:
493 model.delete_ui(id_)
493 model.delete_ui(id_)
494 assert str(exc_info.value) == 'Setting `{}` is not found'.format(id_)
494 assert str(exc_info.value) == 'Setting `{}` is not found'.format(id_)
495
495
496 def test_delete_ui_when_repo_is_not_set(self, settings_util):
496 def test_delete_ui_when_repo_is_not_set(self, settings_util):
497 model = SettingsModel()
497 model = SettingsModel()
498 result = settings_util.create_rhodecode_ui(
498 result = settings_util.create_rhodecode_ui(
499 'section', None, cleanup=False)
499 'section', None, cleanup=False)
500
500
501 key = result.ui_key
501 key = result.ui_key
502 model.delete_ui(result.ui_id)
502 model.delete_ui(result.ui_id)
503 Session().commit()
503 Session().commit()
504
504
505 setting = model.get_ui_by_key(key)
505 setting = model.get_ui_by_key(key)
506 assert setting is None
506 assert setting is None
507
507
508
508
509 class TestRepoGetSettingByName(object):
509 class TestRepoGetSettingByName(object):
510 @pytest.mark.parametrize("name, value, type_, expected_value", [
510 @pytest.mark.parametrize("name, value, type_, expected_value", [
511 ('test_unicode', 'Straße', 'unicode', 'Straße'),
511 ('test_unicode', 'Straße', 'unicode', 'Straße'),
512 ('test_int', '1234', 'int', 1234),
512 ('test_int', '1234', 'int', 1234),
513 ('test_bool', 'True', 'bool', True),
513 ('test_bool', 'True', 'bool', True),
514 ('test_list', 'a,b,c', 'list', ['a', 'b', 'c'])
514 ('test_list', 'a,b,c', 'list', ['a', 'b', 'c'])
515 ])
515 ])
516 def test_setting_is_returned_when_name_is_found(
516 def test_setting_is_returned_when_name_is_found(
517 self, repo_stub, settings_util, name, value, type_,
517 self, repo_stub, settings_util, name, value, type_,
518 expected_value):
518 expected_value):
519 settings_util.create_repo_rhodecode_setting(
519 settings_util.create_repo_rhodecode_setting(
520 repo_stub, name, value, type_)
520 repo_stub, name, value, type_)
521
521
522 model = SettingsModel(repo=repo_stub.repo_name)
522 model = SettingsModel(repo=repo_stub.repo_name)
523 setting = model.get_setting_by_name(name)
523 setting = model.get_setting_by_name(name)
524 assert setting.app_settings_type == type_
524 assert setting.app_settings_type == type_
525 actual_value = setting.app_settings_value
525 actual_value = setting.app_settings_value
526 if type_ == 'unicode':
526 if type_ == 'unicode':
527 actual_value = safe_str(actual_value)
527 actual_value = safe_str(actual_value)
528 assert actual_value == expected_value
528 assert actual_value == expected_value
529
529
530 def test_returns_none_if_the_setting_does_not_exist(self, repo_stub):
530 def test_returns_none_if_the_setting_does_not_exist(self, repo_stub):
531 model = SettingsModel(repo=repo_stub.repo_name)
531 model = SettingsModel(repo=repo_stub.repo_name)
532 setting = model.get_setting_by_name('abcde')
532 setting = model.get_setting_by_name('abcde')
533 assert setting is None
533 assert setting is None
534
534
535
535
536 class TestGlobalGetSettingByName(object):
536 class TestGlobalGetSettingByName(object):
537 @pytest.mark.parametrize("name, value, type_, expected_value", [
537 @pytest.mark.parametrize("name, value, type_, expected_value", [
538 ('test_unicode', 'Straße', 'unicode', 'Straße'),
538 ('test_unicode', 'Straße', 'unicode', 'Straße'),
539 ('test_int', '1234', 'int', 1234),
539 ('test_int', '1234', 'int', 1234),
540 ('test_bool', 'True', 'bool', True),
540 ('test_bool', 'True', 'bool', True),
541 ('test_list', 'a,b,c', 'list', ['a', 'b', 'c'])
541 ('test_list', 'a,b,c', 'list', ['a', 'b', 'c'])
542 ])
542 ])
543 def test_setting_is_returned_when_name_is_found(
543 def test_setting_is_returned_when_name_is_found(
544 self, settings_util, name, value, type_, expected_value):
544 self, settings_util, name, value, type_, expected_value):
545 settings_util.create_rhodecode_setting(name, value, type_)
545 settings_util.create_rhodecode_setting(name, value, type_)
546
546
547 model = SettingsModel()
547 model = SettingsModel()
548 setting = model.get_setting_by_name(name)
548 setting = model.get_setting_by_name(name)
549 assert setting.app_settings_type == type_
549 assert setting.app_settings_type == type_
550 actual_value = setting.app_settings_value
550 actual_value = setting.app_settings_value
551 if type_ == 'unicode':
551 if type_ == 'unicode':
552 actual_value = safe_str(actual_value)
552 actual_value = safe_str(actual_value)
553 assert actual_value == expected_value
553 assert actual_value == expected_value
554
554
555 def test_returns_none_if_the_setting_does_not_exist(self):
555 def test_returns_none_if_the_setting_does_not_exist(self):
556 model = SettingsModel()
556 model = SettingsModel()
557 setting = model.get_setting_by_name('abcde')
557 setting = model.get_setting_by_name('abcde')
558 assert setting is None
558 assert setting is None
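The string-to-typed-value mapping that the parametrized cases above assert can be restated in plain Python. This sketch is illustrative only and is not RhodeCode's actual conversion code.

def coerce(value, type_):
    # mirrors the four parametrized cases: unicode, int, bool, list
    if type_ == 'int':
        return int(value)
    if type_ == 'bool':
        return value == 'True'
    if type_ == 'list':
        return value.split(',')
    return value  # 'unicode' values come back unchanged

assert coerce('1234', 'int') == 1234
assert coerce('True', 'bool') is True
assert coerce('a,b,c', 'list') == ['a', 'b', 'c']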
559
559
560
560
561 class TestRepoGetAllSettings(object):
561 class TestRepoGetAllSettings(object):
562 def test_settings_are_found(self, repo_stub, settings_util):
562 def test_settings_are_found(self, repo_stub, settings_util):
563 initial_settings = {
563 initial_settings = {
564 'test_setting_{}'.format(i): 'value' for i in range(10)}
564 'test_setting_{}'.format(i): 'value' for i in range(10)}
565 settings = [
565 settings = [
566 settings_util.create_repo_rhodecode_setting(
566 settings_util.create_repo_rhodecode_setting(
567 repo_stub, name, initial_settings[name], 'unicode')
567 repo_stub, name, initial_settings[name], 'unicode')
568 for name in initial_settings
568 for name in initial_settings
569 ]
569 ]
570 model = SettingsModel(repo=repo_stub.repo_name)
570 model = SettingsModel(repo=repo_stub.repo_name)
571
571
572 settings = model.get_all_settings()
572 settings = model.get_all_settings()
573 expected_settings = {
573 expected_settings = {
574 'rhodecode_' + name: initial_settings[name]
574 'rhodecode_' + name: initial_settings[name]
575 for name in initial_settings
575 for name in initial_settings
576 }
576 }
577
577
578 assert len(settings) == 10
578 assert len(settings) == 10
579 assert expected_settings == settings
579 assert expected_settings == settings
580
580
581 def test_settings_are_not_found(self, repo_stub):
581 def test_settings_are_not_found(self, repo_stub):
582 model = SettingsModel(repo=repo_stub.repo_name)
582 model = SettingsModel(repo=repo_stub.repo_name)
583 setting = model.get_all_settings()
583 setting = model.get_all_settings()
584 assert setting == {}
584 assert setting == {}
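As the expected_settings construction above shows, repository settings come back from get_all_settings() with a rhodecode_ prefix on every key. A compact restatement of that expectation (the setting name here is an example only):

initial = {'test_setting_0': 'value'}
expected = {'rhodecode_' + name: initial[name] for name in initial}
assert expected == {'rhodecode_test_setting_0': 'value'}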
585
585
586
586
587 class TestGlobalGetAllSettings(object):
587 class TestGlobalGetAllSettings(object):
588 def test_settings_are_found(self, settings_util):
588 def test_settings_are_found(self, settings_util):
589 initial_settings = {
589 initial_settings = {
590 'test_setting_{}'.format(i): 'value' for i in range(10)}
590 'test_setting_{}'.format(i): 'value' for i in range(10)}
591 settings = [
591 settings = [
592 settings_util.create_rhodecode_setting(
592 settings_util.create_rhodecode_setting(
593 name, initial_settings[name], 'unicode')
593 name, initial_settings[name], 'unicode')
594 for name in initial_settings
594 for name in initial_settings
595 ]
595 ]
596 model = SettingsModel()
596 model = SettingsModel()
597
597
598 settings = model.get_all_settings()
598 settings = model.get_all_settings()
599 expected_settings = {
599 expected_settings = {
600 'rhodecode_' + name: initial_settings[name]
600 'rhodecode_' + name: initial_settings[name]
601 for name in initial_settings
601 for name in initial_settings
602 }
602 }
603
603
604 filtered_settings = {
604 filtered_settings = {
605 name: settings[name]
605 name: settings[name]
606 for name in settings if name.startswith('rhodecode_test_setting')
606 for name in settings if name.startswith('rhodecode_test_setting')
607 }
607 }
608 assert len(filtered_settings) == 10
608 assert len(filtered_settings) == 10
609 assert expected_settings == filtered_settings
609 assert expected_settings == filtered_settings
610
610
611 def test_settings_are_not_found(self, repo_stub):
611 def test_settings_are_not_found(self, repo_stub):
612 model = SettingsModel(repo=repo_stub.repo_name)
612 model = SettingsModel(repo=repo_stub.repo_name)
613 setting = model.get_all_settings()
613 setting = model.get_all_settings()
614 assert setting == {}
614 assert setting == {}
615
615
616
616
617 class TestRepoCreateOrUpdateSetting(object):
617 class TestRepoCreateOrUpdateSetting(object):
618 def test_setting_is_created(self, repo_stub):
618 def test_setting_is_created(self, repo_stub):
619 model = SettingsModel(repo=repo_stub.repo_name)
619 model = SettingsModel(repo=repo_stub.repo_name)
620 name = 'test_setting'
620 name = 'test_setting'
621 value = 'test_value'
621 value = 'test_value'
622 model.create_or_update_setting(name, val=value)
622 model.create_or_update_setting(name, val=value)
623
623
624 setting = model.get_setting_by_name(name)
624 setting = model.get_setting_by_name(name)
625 try:
625 try:
626 assert setting.app_settings_name == name
626 assert setting.app_settings_name == name
627 assert setting.app_settings_value == value
627 assert setting.app_settings_value == value
628 assert setting.app_settings_type == 'unicode'
628 assert setting.app_settings_type == 'unicode'
629 assert isinstance(setting, RepoRhodeCodeSetting)
629 assert isinstance(setting, RepoRhodeCodeSetting)
630 finally:
630 finally:
631 Session().delete(setting)
631 Session().delete(setting)
632 Session().commit()
632 Session().commit()
633
633
634 def test_setting_is_updated(self, repo_stub, settings_util):
634 def test_setting_is_updated(self, repo_stub, settings_util):
635 model = SettingsModel(repo=repo_stub.repo_name)
635 model = SettingsModel(repo=repo_stub.repo_name)
636 name = 'test_setting'
636 name = 'test_setting'
637 value = 'test_value'
637 value = 'test_value'
638 settings_util.create_repo_rhodecode_setting(
638 settings_util.create_repo_rhodecode_setting(
639 repo_stub, name, value, 'unicode', cleanup=False)
639 repo_stub, name, value, 'unicode', cleanup=False)
640
640
641 updated_value = 'test_value_2'
641 updated_value = 'test_value_2'
642 model.create_or_update_setting(name, val=updated_value)
642 model.create_or_update_setting(name, val=updated_value)
643
643
644 setting = model.get_setting_by_name(name)
644 setting = model.get_setting_by_name(name)
645 try:
645 try:
646 assert setting.app_settings_name == name
646 assert setting.app_settings_name == name
647 assert setting.app_settings_value == updated_value
647 assert setting.app_settings_value == updated_value
648 assert setting.app_settings_type == 'unicode'
648 assert setting.app_settings_type == 'unicode'
649 assert isinstance(setting, RepoRhodeCodeSetting)
649 assert isinstance(setting, RepoRhodeCodeSetting)
650 finally:
650 finally:
651 Session().delete(setting)
651 Session().delete(setting)
652 Session().commit()
652 Session().commit()
653
653
654
654
655 class TestGlobalCreateOrUpdateSetting(object):
655 class TestGlobalCreateOrUpdateSetting(object):
656 def test_setting_is_created(self):
656 def test_setting_is_created(self):
657 model = SettingsModel()
657 model = SettingsModel()
658 name = 'test_setting'
658 name = 'test_setting'
659 value = 'test_value'
659 value = 'test_value'
660 model.create_or_update_setting(name, val=value)
660 model.create_or_update_setting(name, val=value)
661
661
662 setting = model.get_setting_by_name(name)
662 setting = model.get_setting_by_name(name)
663 try:
663 try:
664 assert setting.app_settings_name == name
664 assert setting.app_settings_name == name
665 assert setting.app_settings_value == value
665 assert setting.app_settings_value == value
666 assert setting.app_settings_type == 'unicode'
666 assert setting.app_settings_type == 'unicode'
667 assert isinstance(setting, RhodeCodeSetting)
667 assert isinstance(setting, RhodeCodeSetting)
668 finally:
668 finally:
669 Session().delete(setting)
669 Session().delete(setting)
670 Session().commit()
670 Session().commit()
671
671
672 def test_setting_is_updated(self, settings_util):
672 def test_setting_is_updated(self, settings_util):
673 model = SettingsModel()
673 model = SettingsModel()
674 name = 'test_setting'
674 name = 'test_setting'
675 value = 'test_value'
675 value = 'test_value'
676 settings_util.create_rhodecode_setting(
676 settings_util.create_rhodecode_setting(
677 name, value, 'unicode', cleanup=False)
677 name, value, 'unicode', cleanup=False)
678
678
679 updated_value = 'test_value_2'
679 updated_value = 'test_value_2'
680 model.create_or_update_setting(name, val=updated_value)
680 model.create_or_update_setting(name, val=updated_value)
681
681
682 setting = model.get_setting_by_name(name)
682 setting = model.get_setting_by_name(name)
683 try:
683 try:
684 assert setting.app_settings_name == name
684 assert setting.app_settings_name == name
685 assert setting.app_settings_value == updated_value
685 assert setting.app_settings_value == updated_value
686 assert setting.app_settings_type == 'unicode'
686 assert setting.app_settings_type == 'unicode'
687 assert isinstance(setting, RhodeCodeSetting)
687 assert isinstance(setting, RhodeCodeSetting)
688 finally:
688 finally:
689 Session().delete(setting)
689 Session().delete(setting)
690 Session().commit()
690 Session().commit()
691
691
692
692
693 class TestRepoGetAuthSettings(object):
693 class TestRepoGetAuthSettings(object):
694 def test_settings_prefixed_with_auth_are_returned(
694 def test_settings_prefixed_with_auth_are_returned(
695 self, repo_stub, settings_util):
695 self, repo_stub, settings_util):
696 model = SettingsModel(repo=repo_stub.repo_name)
696 model = SettingsModel(repo=repo_stub.repo_name)
697 valid_settings = ('auth_test1', 'auth_test2')
697 valid_settings = ('auth_test1', 'auth_test2')
698 invalid_settings = ('test1', 'test2')
698 invalid_settings = ('test1', 'test2')
699 fake_value = 'test_value'
699 fake_value = 'test_value'
700
700
701 for name in valid_settings + invalid_settings:
701 for name in valid_settings + invalid_settings:
702 settings_util.create_repo_rhodecode_setting(
702 settings_util.create_repo_rhodecode_setting(
703 repo_stub, name, fake_value, 'unicode')
703 repo_stub, name, fake_value, 'unicode')
704
704
705 auth_settings = model.get_auth_settings()
705 auth_settings = model.get_auth_settings()
706 assert auth_settings == {name: fake_value for name in valid_settings}
706 assert auth_settings == {name: fake_value for name in valid_settings}
707
707
708
708
709 class TestGlobalGetAuthSettings(object):
709 class TestGlobalGetAuthSettings(object):
710 def test_settings_prefixed_with_auth_are_returned(self, settings_util):
710 def test_settings_prefixed_with_auth_are_returned(self, settings_util):
711 model = SettingsModel()
711 model = SettingsModel()
712 valid_settings = ('auth_test1', 'auth_test2')
712 valid_settings = ('auth_test1', 'auth_test2')
713 invalid_settings = ('test1', 'test2')
713 invalid_settings = ('test1', 'test2')
714 fake_value = 'test_value'
714 fake_value = 'test_value'
715
715
716 for name in valid_settings + invalid_settings:
716 for name in valid_settings + invalid_settings:
717 settings_util.create_rhodecode_setting(name, fake_value, 'unicode')
717 settings_util.create_rhodecode_setting(name, fake_value, 'unicode')
718
718
719 auth_settings = model.get_auth_settings()
719 auth_settings = model.get_auth_settings()
720 for name in auth_settings:
720 for name in auth_settings:
721 assert name not in invalid_settings
721 assert name not in invalid_settings
722 if name in valid_settings:
722 if name in valid_settings:
723 assert auth_settings[name] == fake_value
723 assert auth_settings[name] == fake_value
724
724
725
725
726 class TestGetAuthPlugins(object):
726 class TestGetAuthPlugins(object):
727 def test_get_setting_by_name_is_called(self):
727 def test_get_setting_by_name_is_called(self):
728 model = SettingsModel()
728 model = SettingsModel()
729
729
730 fake_value = 'some value'
730 fake_value = 'some value'
731 result_mock = mock.Mock()
731 result_mock = mock.Mock()
732 result_mock.app_settings_value = fake_value
732 result_mock.app_settings_value = fake_value
733
733
734 get_setting_patch = mock.patch.object(
734 get_setting_patch = mock.patch.object(
735 model, 'get_setting_by_name', return_value=result_mock)
735 model, 'get_setting_by_name', return_value=result_mock)
736
736
737 with get_setting_patch as get_setting_mock:
737 with get_setting_patch as get_setting_mock:
738 result = model.get_auth_plugins()
738 result = model.get_auth_plugins()
739
739
740 get_setting_mock.assert_called_once_with('auth_plugins')
740 get_setting_mock.assert_called_once_with('auth_plugins')
741 assert result == fake_value
741 assert result == fake_value
742
742
743
743
744 class TestDefaultRepoSettings(object):
744 class TestDefaultRepoSettings(object):
745 DEFAULT_SETTINGS_NAMES = ['default_a{}'.format(i) for i in range(10)]
745 DEFAULT_SETTINGS_NAMES = ['default_a{}'.format(i) for i in range(10)]
746 CUSTOM_SETTINGS_NAMES = ['setting_b_{}'.format(i) for i in range(10)]
746 CUSTOM_SETTINGS_NAMES = ['setting_b_{}'.format(i) for i in range(10)]
747
747
748 def test_returns_global_settings_prefixed_with_default(
748 def test_returns_global_settings_prefixed_with_default(
749 self, settings_util):
749 self, settings_util):
750 self._create_values(settings_util)
750 self._create_values(settings_util)
751 model = SettingsModel()
751 model = SettingsModel()
752 result = model.get_default_repo_settings()
752 result = model.get_default_repo_settings()
753 self._assert_prefixed_settings(result)
753 self._assert_prefixed_settings(result)
754
754
755 def test_returns_global_settings_without_default_prefix(
755 def test_returns_global_settings_without_default_prefix(
756 self, settings_util):
756 self, settings_util):
757 self._create_values(settings_util)
757 self._create_values(settings_util)
758 model = SettingsModel()
758 model = SettingsModel()
759 result = model.get_default_repo_settings(strip_prefix=True)
759 result = model.get_default_repo_settings(strip_prefix=True)
760 self._assert_non_prefixed_settings(result)
760 self._assert_non_prefixed_settings(result)
761
761
762 def test_returns_per_repo_settings_prefixed_with_default(
762 def test_returns_per_repo_settings_prefixed_with_default(
763 self, repo_stub, settings_util):
763 self, repo_stub, settings_util):
764 model = SettingsModel(repo=repo_stub)
764 model = SettingsModel(repo=repo_stub)
765 self._create_values(settings_util, repo=repo_stub)
765 self._create_values(settings_util, repo=repo_stub)
766 result = model.get_default_repo_settings()
766 result = model.get_default_repo_settings()
767 self._assert_prefixed_settings(result)
767 self._assert_prefixed_settings(result)
768
768
769 def test_returns_per_repo_settings_without_default_prefix(
769 def test_returns_per_repo_settings_without_default_prefix(
770 self, repo_stub, settings_util):
770 self, repo_stub, settings_util):
771 model = SettingsModel(repo=repo_stub)
771 model = SettingsModel(repo=repo_stub)
772 self._create_values(settings_util, repo=repo_stub)
772 self._create_values(settings_util, repo=repo_stub)
773 result = model.get_default_repo_settings(strip_prefix=True)
773 result = model.get_default_repo_settings(strip_prefix=True)
774 self._assert_non_prefixed_settings(result)
774 self._assert_non_prefixed_settings(result)
775
775
776 def _create_values(self, settings_util, repo=None):
776 def _create_values(self, settings_util, repo=None):
777 for name in self.DEFAULT_SETTINGS_NAMES + self.CUSTOM_SETTINGS_NAMES:
777 for name in self.DEFAULT_SETTINGS_NAMES + self.CUSTOM_SETTINGS_NAMES:
778 if not repo:
778 if not repo:
779 settings_util.create_rhodecode_setting(
779 settings_util.create_rhodecode_setting(
780 name, 'value', 'unicode')
780 name, 'value', 'unicode')
781 else:
781 else:
782 settings_util.create_repo_rhodecode_setting(
782 settings_util.create_repo_rhodecode_setting(
783 repo, name, 'value', 'unicode')
783 repo, name, 'value', 'unicode')
784
784
785 def _assert_prefixed_settings(self, result):
785 def _assert_prefixed_settings(self, result):
786 for setting in self.DEFAULT_SETTINGS_NAMES:
786 for setting in self.DEFAULT_SETTINGS_NAMES:
787 assert setting in result
787 assert setting in result
788 assert result[setting] == 'value'
788 assert result[setting] == 'value'
789
789
790 for setting in self.CUSTOM_SETTINGS_NAMES:
790 for setting in self.CUSTOM_SETTINGS_NAMES:
791 assert setting not in result
791 assert setting not in result
792
792
793 def _assert_non_prefixed_settings(self, result):
793 def _assert_non_prefixed_settings(self, result):
794 for setting in self.DEFAULT_SETTINGS_NAMES:
794 for setting in self.DEFAULT_SETTINGS_NAMES:
795 setting = setting.replace('default_', '')
795 setting = setting.replace('default_', '')
796 assert setting in result
796 assert setting in result
797 assert result[setting] == 'value'
797 assert result[setting] == 'value'
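The strip_prefix behaviour asserted above amounts to removing the leading 'default_' from each key, using the same str.replace() call as the helper; a short illustration with example names:

names = ['default_a{}'.format(i) for i in range(3)]
assert [n.replace('default_', '') for n in names] == ['a0', 'a1', 'a2']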
798
798
799
799
800 @pytest.fixture
800 @pytest.fixture()
801 def fake_ui_values():
801 def fake_ui_values():
802 return [
802 return [
803 UiSetting(
803 UiSetting(
804 'section{}'.format(i % 2), 'key{}'.format(i),
804 'section{}'.format(i % 2), 'key{}'.format(i),
805 'value{}'.format(i), True)
805 'value{}'.format(i), True)
806 for i in range(10)
806 for i in range(10)
807 ]
807 ]
@@ -1,735 +1,735 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.auth import AuthUser
23 from rhodecode.lib.auth import AuthUser
24 from rhodecode.model.db import (
24 from rhodecode.model.db import (
25 RepoGroup, User, UserGroupRepoGroupToPerm, Permission, UserToPerm,
25 RepoGroup, User, UserGroupRepoGroupToPerm, Permission, UserToPerm,
26 UserGroupToPerm)
26 UserGroupToPerm)
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28 from rhodecode.model.permission import PermissionModel
28 from rhodecode.model.permission import PermissionModel
29 from rhodecode.model.repo import RepoModel
29 from rhodecode.model.repo import RepoModel
30 from rhodecode.model.repo_group import RepoGroupModel
30 from rhodecode.model.repo_group import RepoGroupModel
31 from rhodecode.model.user import UserModel
31 from rhodecode.model.user import UserModel
32 from rhodecode.model.user_group import UserGroupModel
32 from rhodecode.model.user_group import UserGroupModel
33 from rhodecode.tests.fixture import Fixture
33 from rhodecode.tests.fixture import Fixture
34
34
35
35
36 fixture = Fixture()
36 fixture = Fixture()
37
37
38
38
39 @pytest.fixture
39 @pytest.fixture()
40 def repo_name(backend_hg):
40 def repo_name(backend_hg):
41 return backend_hg.repo_name
41 return backend_hg.repo_name
42
42
43
43
44 class TestPermissions(object):
44 class TestPermissions(object):
45
45
46 @pytest.fixture(scope='class', autouse=True)
46 @pytest.fixture(scope='class', autouse=True)
47 def default_permissions(self, request, baseapp):
47 def default_permissions(self, request, baseapp):
48 # recreate default user to get a clean start
48 # recreate default user to get a clean start
49 PermissionModel().create_default_user_permissions(
49 PermissionModel().create_default_user_permissions(
50 user=User.DEFAULT_USER, force=True)
50 user=User.DEFAULT_USER, force=True)
51 Session().commit()
51 Session().commit()
52
52
53 @pytest.fixture(autouse=True)
53 @pytest.fixture(autouse=True)
54 def prepare_users(self, request):
54 def prepare_users(self, request):
55 # TODO: User creation is a duplicate of test_notifications, check
55 # TODO: User creation is a duplicate of test_notifications, check
56 # if that can be unified
56 # if that can be unified
57 self.u1 = UserModel().create_or_update(
57 self.u1 = UserModel().create_or_update(
58 username=u'u1', password=u'qweqwe',
58 username=u'u1', password=u'qweqwe',
59 email=u'u1@rhodecode.org', firstname=u'u1', lastname=u'u1'
59 email=u'u1@rhodecode.org', firstname=u'u1', lastname=u'u1'
60 )
60 )
61 self.u2 = UserModel().create_or_update(
61 self.u2 = UserModel().create_or_update(
62 username=u'u2', password=u'qweqwe',
62 username=u'u2', password=u'qweqwe',
63 email=u'u2@rhodecode.org', firstname=u'u2', lastname=u'u2'
63 email=u'u2@rhodecode.org', firstname=u'u2', lastname=u'u2'
64 )
64 )
65 self.u3 = UserModel().create_or_update(
65 self.u3 = UserModel().create_or_update(
66 username=u'u3', password=u'qweqwe',
66 username=u'u3', password=u'qweqwe',
67 email=u'u3@rhodecode.org', firstname=u'u3', lastname=u'u3'
67 email=u'u3@rhodecode.org', firstname=u'u3', lastname=u'u3'
68 )
68 )
69 self.anon = User.get_default_user()
69 self.anon = User.get_default_user()
70 self.a1 = UserModel().create_or_update(
70 self.a1 = UserModel().create_or_update(
71 username=u'a1', password=u'qweqwe',
71 username=u'a1', password=u'qweqwe',
72 email=u'a1@rhodecode.org', firstname=u'a1', lastname=u'a1',
72 email=u'a1@rhodecode.org', firstname=u'a1', lastname=u'a1',
73 admin=True
73 admin=True
74 )
74 )
75 Session().commit()
75 Session().commit()
76
76
77 request.addfinalizer(self.cleanup)
77 request.addfinalizer(self.cleanup)
78
78
79 def cleanup(self):
79 def cleanup(self):
80 if hasattr(self, 'test_repo'):
80 if hasattr(self, 'test_repo'):
81 RepoModel().delete(repo=self.test_repo)
81 RepoModel().delete(repo=self.test_repo)
82
82
83 if hasattr(self, 'g1'):
83 if hasattr(self, 'g1'):
84 RepoGroupModel().delete(self.g1.group_id)
84 RepoGroupModel().delete(self.g1.group_id)
85 if hasattr(self, 'g2'):
85 if hasattr(self, 'g2'):
86 RepoGroupModel().delete(self.g2.group_id)
86 RepoGroupModel().delete(self.g2.group_id)
87
87
88 UserModel().delete(self.u1)
88 UserModel().delete(self.u1)
89 UserModel().delete(self.u2)
89 UserModel().delete(self.u2)
90 UserModel().delete(self.u3)
90 UserModel().delete(self.u3)
91 UserModel().delete(self.a1)
91 UserModel().delete(self.a1)
92
92
93 if hasattr(self, 'ug1'):
93 if hasattr(self, 'ug1'):
94 UserGroupModel().delete(self.ug1, force=True)
94 UserGroupModel().delete(self.ug1, force=True)
95
95
96 Session().commit()
96 Session().commit()
97
97
98 def test_default_perms_set(self, repo_name):
98 def test_default_perms_set(self, repo_name):
99 assert repo_perms(self.u1)[repo_name] == 'repository.read'
99 assert repo_perms(self.u1)[repo_name] == 'repository.read'
100 new_perm = 'repository.write'
100 new_perm = 'repository.write'
101 RepoModel().grant_user_permission(repo=repo_name, user=self.u1,
101 RepoModel().grant_user_permission(repo=repo_name, user=self.u1,
102 perm=new_perm)
102 perm=new_perm)
103 Session().commit()
103 Session().commit()
104 assert repo_perms(self.u1)[repo_name] == new_perm
104 assert repo_perms(self.u1)[repo_name] == new_perm
105
105
106 def test_default_admin_perms_set(self, repo_name):
106 def test_default_admin_perms_set(self, repo_name):
107 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
107 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
108 RepoModel().grant_user_permission(repo=repo_name, user=self.a1,
108 RepoModel().grant_user_permission(repo=repo_name, user=self.a1,
109 perm='repository.write')
109 perm='repository.write')
110 Session().commit()
110 Session().commit()
111 # cannot really downgrade an admin's permissions !? they still get set as
111 # cannot really downgrade an admin's permissions !? they still get set as
112 # admin !
112 # admin !
113 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
113 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
114
114
115 def test_default_group_perms(self, repo_name):
115 def test_default_group_perms(self, repo_name):
116 self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
116 self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
117 self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
117 self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
118
118
119 assert repo_perms(self.u1)[repo_name] == 'repository.read'
119 assert repo_perms(self.u1)[repo_name] == 'repository.read'
120 assert group_perms(self.u1) == {
120 assert group_perms(self.u1) == {
121 'test1': 'group.read', 'test2': 'group.read'}
121 'test1': 'group.read', 'test2': 'group.read'}
122 assert global_perms(self.u1) == set(
122 assert global_perms(self.u1) == set(
123 Permission.DEFAULT_USER_PERMISSIONS)
123 Permission.DEFAULT_USER_PERMISSIONS)
124
124
125 def test_default_admin_group_perms(self, repo_name):
125 def test_default_admin_group_perms(self, repo_name):
126 self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
126 self.g1 = fixture.create_repo_group('test1', skip_if_exists=True)
127 self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
127 self.g2 = fixture.create_repo_group('test2', skip_if_exists=True)
128
128
129 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
129 assert repo_perms(self.a1)[repo_name] == 'repository.admin'
130 assert group_perms(self.a1) == {
130 assert group_perms(self.a1) == {
131 'test1': 'group.admin', 'test2': 'group.admin'}
131 'test1': 'group.admin', 'test2': 'group.admin'}
132
132
133 def test_default_owner_repo_perms(self, backend, user_util, test_repo):
133 def test_default_owner_repo_perms(self, backend, user_util, test_repo):
134 user = user_util.create_user()
134 user = user_util.create_user()
135 repo = test_repo('minimal', backend.alias)
135 repo = test_repo('minimal', backend.alias)
136 org_owner = repo.user
136 org_owner = repo.user
137 assert repo_perms(user)[repo.repo_name] == 'repository.read'
137 assert repo_perms(user)[repo.repo_name] == 'repository.read'
138
138
139 repo.user = user
139 repo.user = user
140 assert repo_perms(user)[repo.repo_name] == 'repository.admin'
140 assert repo_perms(user)[repo.repo_name] == 'repository.admin'
141 repo.user = org_owner
141 repo.user = org_owner
142
142
143 def test_default_owner_branch_perms(self, user_util, test_user_group):
143 def test_default_owner_branch_perms(self, user_util, test_user_group):
144 user = user_util.create_user()
144 user = user_util.create_user()
145 assert branch_perms(user) == {}
145 assert branch_perms(user) == {}
146
146
147 def test_default_owner_repo_group_perms(self, user_util, test_repo_group):
147 def test_default_owner_repo_group_perms(self, user_util, test_repo_group):
148 user = user_util.create_user()
148 user = user_util.create_user()
149 org_owner = test_repo_group.user
149 org_owner = test_repo_group.user
150
150
151 assert group_perms(user)[test_repo_group.group_name] == 'group.read'
151 assert group_perms(user)[test_repo_group.group_name] == 'group.read'
152
152
153 test_repo_group.user = user
153 test_repo_group.user = user
154 assert group_perms(user)[test_repo_group.group_name] == 'group.admin'
154 assert group_perms(user)[test_repo_group.group_name] == 'group.admin'
155 test_repo_group.user = org_owner
155 test_repo_group.user = org_owner
156
156
157 def test_default_owner_user_group_perms(self, user_util, test_user_group):
157 def test_default_owner_user_group_perms(self, user_util, test_user_group):
158 user = user_util.create_user()
158 user = user_util.create_user()
159 org_owner = test_user_group.user
159 org_owner = test_user_group.user
160
160
161 assert user_group_perms(user)[test_user_group.users_group_name] == 'usergroup.read'
161 assert user_group_perms(user)[test_user_group.users_group_name] == 'usergroup.read'
162
162
163 test_user_group.user = user
163 test_user_group.user = user
164 assert user_group_perms(user)[test_user_group.users_group_name] == 'usergroup.admin'
164 assert user_group_perms(user)[test_user_group.users_group_name] == 'usergroup.admin'
165
165
166 test_user_group.user = org_owner
166 test_user_group.user = org_owner
167
167
168 def test_propagated_permission_from_users_group_by_explicit_perms_exist(
168 def test_propagated_permission_from_users_group_by_explicit_perms_exist(
169 self, repo_name):
169 self, repo_name):
170 # make group
170 # make group
171 self.ug1 = fixture.create_user_group('G1')
171 self.ug1 = fixture.create_user_group('G1')
172 UserGroupModel().add_user_to_group(self.ug1, self.u1)
172 UserGroupModel().add_user_to_group(self.ug1, self.u1)
173
173
174 # set permission to lower
174 # set permission to lower
175 new_perm = 'repository.none'
175 new_perm = 'repository.none'
176 RepoModel().grant_user_permission(
176 RepoModel().grant_user_permission(
177 repo=repo_name, user=self.u1, perm=new_perm)
177 repo=repo_name, user=self.u1, perm=new_perm)
178 Session().commit()
178 Session().commit()
179 assert repo_perms(self.u1)[repo_name] == new_perm
179 assert repo_perms(self.u1)[repo_name] == new_perm
180
180
181 # grant perm for group; this should not override the permission from user
181 # grant perm for group; this should not override the permission from user
182 # since it was explicitly set
182 # since it was explicitly set
183 new_perm_gr = 'repository.write'
183 new_perm_gr = 'repository.write'
184 RepoModel().grant_user_group_permission(
184 RepoModel().grant_user_group_permission(
185 repo=repo_name, group_name=self.ug1, perm=new_perm_gr)
185 repo=repo_name, group_name=self.ug1, perm=new_perm_gr)
186
186
187 assert repo_perms(self.u1)[repo_name] == new_perm
187 assert repo_perms(self.u1)[repo_name] == new_perm
188 assert group_perms(self.u1) == {}
188 assert group_perms(self.u1) == {}
189
189
190 def test_propagated_permission_from_users_group(self, repo_name):
190 def test_propagated_permission_from_users_group(self, repo_name):
191 # make group
191 # make group
192 self.ug1 = fixture.create_user_group('G1')
192 self.ug1 = fixture.create_user_group('G1')
193 UserGroupModel().add_user_to_group(self.ug1, self.u3)
193 UserGroupModel().add_user_to_group(self.ug1, self.u3)
194
194
195 # grant perm for group
195 # grant perm for group
196 # this should override default permission from user
196 # this should override default permission from user
197 new_perm_gr = 'repository.write'
197 new_perm_gr = 'repository.write'
198 RepoModel().grant_user_group_permission(
198 RepoModel().grant_user_group_permission(
199 repo=repo_name, group_name=self.ug1, perm=new_perm_gr)
199 repo=repo_name, group_name=self.ug1, perm=new_perm_gr)
200
200
201 assert repo_perms(self.u3)[repo_name] == new_perm_gr
201 assert repo_perms(self.u3)[repo_name] == new_perm_gr
202 assert group_perms(self.u3) == {}
202 assert group_perms(self.u3) == {}
203
203
204 def test_propagated_permission_from_users_group_lower_weight(
204 def test_propagated_permission_from_users_group_lower_weight(
205 self, repo_name):
205 self, repo_name):
206 # make group with user
206 # make group with user
207 self.ug1 = fixture.create_user_group('G1')
207 self.ug1 = fixture.create_user_group('G1')
208 UserGroupModel().add_user_to_group(self.ug1, self.u1)
208 UserGroupModel().add_user_to_group(self.ug1, self.u1)
209
209
210 # grant the user an explicit (higher) permission
210 # grant the user an explicit (higher) permission
211 new_perm_h = 'repository.write'
211 new_perm_h = 'repository.write'
212 RepoModel().grant_user_permission(
212 RepoModel().grant_user_permission(
213 repo=repo_name, user=self.u1, perm=new_perm_h)
213 repo=repo_name, user=self.u1, perm=new_perm_h)
214 Session().commit()
214 Session().commit()
215
215
216 assert repo_perms(self.u1)[repo_name] == new_perm_h
216 assert repo_perms(self.u1)[repo_name] == new_perm_h
217
217
218 # grant perm for group; this should NOT override the permission from user
218 # grant perm for group; this should NOT override the permission from user
219 # since it's lower than the one already granted
219 # since it's lower than the one already granted
220 new_perm_l = 'repository.read'
220 new_perm_l = 'repository.read'
221 RepoModel().grant_user_group_permission(
221 RepoModel().grant_user_group_permission(
222 repo=repo_name, group_name=self.ug1, perm=new_perm_l)
222 repo=repo_name, group_name=self.ug1, perm=new_perm_l)
223
223
224 assert repo_perms(self.u1)[repo_name] == new_perm_h
224 assert repo_perms(self.u1)[repo_name] == new_perm_h
225 assert group_perms(self.u1) == {}
225 assert group_perms(self.u1) == {}
226
226
227 def test_repo_in_group_permissions(self):
227 def test_repo_in_group_permissions(self):
228 self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
228 self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
229 self.g2 = fixture.create_repo_group('group2', skip_if_exists=True)
229 self.g2 = fixture.create_repo_group('group2', skip_if_exists=True)
230 # both perms should be read !
230 # both perms should be read !
231 assert group_perms(self.u1) == \
231 assert group_perms(self.u1) == \
232 {u'group1': u'group.read', u'group2': u'group.read'}
232 {u'group1': u'group.read', u'group2': u'group.read'}
233
233
234 assert group_perms(self.anon) == \
234 assert group_perms(self.anon) == \
235 {u'group1': u'group.read', u'group2': u'group.read'}
235 {u'group1': u'group.read', u'group2': u'group.read'}
236
236
237 # Change perms to none for both groups
237 # Change perms to none for both groups
238 RepoGroupModel().grant_user_permission(
238 RepoGroupModel().grant_user_permission(
239 repo_group=self.g1, user=self.anon, perm='group.none')
239 repo_group=self.g1, user=self.anon, perm='group.none')
240 RepoGroupModel().grant_user_permission(
240 RepoGroupModel().grant_user_permission(
241 repo_group=self.g2, user=self.anon, perm='group.none')
241 repo_group=self.g2, user=self.anon, perm='group.none')
242
242
243 assert group_perms(self.u1) == \
243 assert group_perms(self.u1) == \
244 {u'group1': u'group.none', u'group2': u'group.none'}
244 {u'group1': u'group.none', u'group2': u'group.none'}
245 assert group_perms(self.anon) == \
245 assert group_perms(self.anon) == \
246 {u'group1': u'group.none', u'group2': u'group.none'}
246 {u'group1': u'group.none', u'group2': u'group.none'}
247
247
248 # add repo to group
248 # add repo to group
249 name = RepoGroup.url_sep().join([self.g1.group_name, 'test_perm'])
249 name = RepoGroup.url_sep().join([self.g1.group_name, 'test_perm'])
250 self.test_repo = fixture.create_repo(name=name,
250 self.test_repo = fixture.create_repo(name=name,
251 repo_type='hg',
251 repo_type='hg',
252 repo_group=self.g1,
252 repo_group=self.g1,
253 cur_user=self.u1,)
253 cur_user=self.u1,)
254
254
255 assert group_perms(self.u1) == \
255 assert group_perms(self.u1) == \
256 {u'group1': u'group.none', u'group2': u'group.none'}
256 {u'group1': u'group.none', u'group2': u'group.none'}
257 assert group_perms(self.anon) == \
257 assert group_perms(self.anon) == \
258 {u'group1': u'group.none', u'group2': u'group.none'}
258 {u'group1': u'group.none', u'group2': u'group.none'}
259
259
260 # grant permission for u2 !
260 # grant permission for u2 !
261 RepoGroupModel().grant_user_permission(
261 RepoGroupModel().grant_user_permission(
262 repo_group=self.g1, user=self.u2, perm='group.read')
262 repo_group=self.g1, user=self.u2, perm='group.read')
263 RepoGroupModel().grant_user_permission(
263 RepoGroupModel().grant_user_permission(
264 repo_group=self.g2, user=self.u2, perm='group.read')
264 repo_group=self.g2, user=self.u2, perm='group.read')
265 Session().commit()
265 Session().commit()
266 assert self.u1 != self.u2
266 assert self.u1 != self.u2
267
267
268 # u1 and anon should not have changed perms, while u2 should !
268 # u1 and anon should not have changed perms, while u2 should !
269 assert group_perms(self.u1) == \
269 assert group_perms(self.u1) == \
270 {u'group1': u'group.none', u'group2': u'group.none'}
270 {u'group1': u'group.none', u'group2': u'group.none'}
271 assert group_perms(self.u2) == \
271 assert group_perms(self.u2) == \
272 {u'group1': u'group.read', u'group2': u'group.read'}
272 {u'group1': u'group.read', u'group2': u'group.read'}
273 assert group_perms(self.anon) == \
273 assert group_perms(self.anon) == \
274 {u'group1': u'group.none', u'group2': u'group.none'}
274 {u'group1': u'group.none', u'group2': u'group.none'}
275
275
276 def test_repo_group_user_as_user_group_member(self):
276 def test_repo_group_user_as_user_group_member(self):
277 # create Group1
277 # create Group1
278 self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
278 self.g1 = fixture.create_repo_group('group1', skip_if_exists=True)
279 assert group_perms(self.anon) == {u'group1': u'group.read'}
279 assert group_perms(self.anon) == {u'group1': u'group.read'}
280
280
281 # set default permission to none
281 # set default permission to none
282 RepoGroupModel().grant_user_permission(
282 RepoGroupModel().grant_user_permission(
283 repo_group=self.g1, user=self.anon, perm='group.none')
283 repo_group=self.g1, user=self.anon, perm='group.none')
284 # make group
284 # make group
285 self.ug1 = fixture.create_user_group('G1')
285 self.ug1 = fixture.create_user_group('G1')
286 # add user to group
286 # add user to group
287 UserGroupModel().add_user_to_group(self.ug1, self.u1)
287 UserGroupModel().add_user_to_group(self.ug1, self.u1)
288 Session().commit()
288 Session().commit()
289
289
290 # check if user is in the group
290 # check if user is in the group
291 ug1 = UserGroupModel().get(self.ug1.users_group_id)
291 ug1 = UserGroupModel().get(self.ug1.users_group_id)
292 members = [x.user_id for x in ug1.members]
292 members = [x.user_id for x in ug1.members]
293 assert members == [self.u1.user_id]
293 assert members == [self.u1.user_id]
294 # add some user to that group
294 # add some user to that group
295
295
296 # check his permissions
296 # check his permissions
297 assert group_perms(self.anon) == {u'group1': u'group.none'}
297 assert group_perms(self.anon) == {u'group1': u'group.none'}
298 assert group_perms(self.u1) == {u'group1': u'group.none'}
298 assert group_perms(self.u1) == {u'group1': u'group.none'}
299
299
300 # grant ug1 read permissions for the repo group
300 # grant ug1 read permissions for the repo group
301 RepoGroupModel().grant_user_group_permission(
301 RepoGroupModel().grant_user_group_permission(
302 repo_group=self.g1, group_name=self.ug1, perm='group.read')
302 repo_group=self.g1, group_name=self.ug1, perm='group.read')
303 Session().commit()
303 Session().commit()
304
304
305 # check if the permission is set in the database
305 # check if the permission is set in the database
306 obj = Session().query(UserGroupRepoGroupToPerm)\
306 obj = Session().query(UserGroupRepoGroupToPerm)\
307 .filter(UserGroupRepoGroupToPerm.group == self.g1)\
307 .filter(UserGroupRepoGroupToPerm.group == self.g1)\
308 .filter(UserGroupRepoGroupToPerm.users_group == self.ug1)\
308 .filter(UserGroupRepoGroupToPerm.users_group == self.ug1)\
309 .scalar()
309 .scalar()
310 assert obj.permission.permission_name == 'group.read'
310 assert obj.permission.permission_name == 'group.read'
311
311
312 assert group_perms(self.anon) == {u'group1': u'group.none'}
312 assert group_perms(self.anon) == {u'group1': u'group.none'}
313 assert group_perms(self.u1) == {u'group1': u'group.read'}
313 assert group_perms(self.u1) == {u'group1': u'group.read'}
314
314
315 def test_inherited_permissions_from_default_on_user_enabled(self):
315 def test_inherited_permissions_from_default_on_user_enabled(self):
316 # enable fork and create on default user
316 # enable fork and create on default user
317 _form_result = {
317 _form_result = {
318 'default_repo_create': 'hg.create.repository',
318 'default_repo_create': 'hg.create.repository',
319 'default_fork_create': 'hg.fork.repository'
319 'default_fork_create': 'hg.fork.repository'
320 }
320 }
321 PermissionModel().set_new_user_perms(
321 PermissionModel().set_new_user_perms(
322 User.get_default_user(), _form_result)
322 User.get_default_user(), _form_result)
323 Session().commit()
323 Session().commit()
324
324
325 # make sure inherit flag is turned on
325 # make sure inherit flag is turned on
326 self.u1.inherit_default_permissions = True
326 self.u1.inherit_default_permissions = True
327 Session().commit()
327 Session().commit()
328
328
329 # this user will have inherited permissions from default user
329 # this user will have inherited permissions from default user
330 assert global_perms(self.u1) == default_perms()
330 assert global_perms(self.u1) == default_perms()
331
331
332 def test_inherited_permissions_from_default_on_user_disabled(self):
332 def test_inherited_permissions_from_default_on_user_disabled(self):
333 # disable fork and create on default user
333 # disable fork and create on default user
334 _form_result = {
334 _form_result = {
335 'default_repo_create': 'hg.create.none',
335 'default_repo_create': 'hg.create.none',
336 'default_fork_create': 'hg.fork.none'
336 'default_fork_create': 'hg.fork.none'
337 }
337 }
338 PermissionModel().set_new_user_perms(
338 PermissionModel().set_new_user_perms(
339 User.get_default_user(), _form_result)
339 User.get_default_user(), _form_result)
340 Session().commit()
340 Session().commit()
341
341
342 # make sure inherit flag is turned on
342 # make sure inherit flag is turned on
343 self.u1.inherit_default_permissions = True
343 self.u1.inherit_default_permissions = True
344 Session().commit()
344 Session().commit()
345
345
346 # this user will have inherited permissions from default user
346 # this user will have inherited permissions from default user
347 expected_perms = default_perms(
347 expected_perms = default_perms(
348 added=['hg.create.none', 'hg.fork.none'],
348 added=['hg.create.none', 'hg.fork.none'],
349 removed=['hg.create.repository', 'hg.fork.repository'])
349 removed=['hg.create.repository', 'hg.fork.repository'])
350 assert global_perms(self.u1) == expected_perms
350 assert global_perms(self.u1) == expected_perms
351
351
352 def test_non_inherited_permissions_from_default_on_user_enabled(self):
352 def test_non_inherited_permissions_from_default_on_user_enabled(self):
353 user_model = UserModel()
353 user_model = UserModel()
354 # enable fork and create on default user
354 # enable fork and create on default user
355 usr = User.DEFAULT_USER
355 usr = User.DEFAULT_USER
356 user_model.revoke_perm(usr, 'hg.create.none')
356 user_model.revoke_perm(usr, 'hg.create.none')
357 user_model.grant_perm(usr, 'hg.create.repository')
357 user_model.grant_perm(usr, 'hg.create.repository')
358 user_model.revoke_perm(usr, 'hg.fork.none')
358 user_model.revoke_perm(usr, 'hg.fork.none')
359 user_model.grant_perm(usr, 'hg.fork.repository')
359 user_model.grant_perm(usr, 'hg.fork.repository')
360
360
361 # disable global perms on specific user
361 # disable global perms on specific user
362 user_model.revoke_perm(self.u1, 'hg.create.repository')
362 user_model.revoke_perm(self.u1, 'hg.create.repository')
363 user_model.grant_perm(self.u1, 'hg.create.none')
363 user_model.grant_perm(self.u1, 'hg.create.none')
364 user_model.revoke_perm(self.u1, 'hg.fork.repository')
364 user_model.revoke_perm(self.u1, 'hg.fork.repository')
365 user_model.grant_perm(self.u1, 'hg.fork.none')
365 user_model.grant_perm(self.u1, 'hg.fork.none')
366
366
367 # TODO(marcink): check branch permissions now ?
367 # TODO(marcink): check branch permissions now ?
368
368
369 # make sure inherit flag is turned off
369 # make sure inherit flag is turned off
370 self.u1.inherit_default_permissions = False
370 self.u1.inherit_default_permissions = False
371 Session().commit()
371 Session().commit()
372
372
373 # this user will have non-inherited permissions from his
373 # this user will have non-inherited permissions from his
374 # explicitly set permissions
374 # explicitly set permissions
375 assert global_perms(self.u1) == {
375 assert global_perms(self.u1) == {
376 'hg.create.none',
376 'hg.create.none',
377 'hg.fork.none',
377 'hg.fork.none',
378 'hg.register.manual_activate',
378 'hg.register.manual_activate',
379 'hg.password_reset.enabled',
379 'hg.password_reset.enabled',
380 'hg.extern_activate.auto',
380 'hg.extern_activate.auto',
381 'repository.read',
381 'repository.read',
382 'group.read',
382 'group.read',
383 'usergroup.read',
383 'usergroup.read',
384 'branch.push_force',
384 'branch.push_force',
385 }
385 }
386
386
387 def test_non_inherited_permissions_from_default_on_user_disabled(self):
387 def test_non_inherited_permissions_from_default_on_user_disabled(self):
388 user_model = UserModel()
388 user_model = UserModel()
389 # disable fork and create on default user
389 # disable fork and create on default user
390 usr = User.DEFAULT_USER
390 usr = User.DEFAULT_USER
391 user_model.revoke_perm(usr, 'hg.create.repository')
391 user_model.revoke_perm(usr, 'hg.create.repository')
392 user_model.grant_perm(usr, 'hg.create.none')
392 user_model.grant_perm(usr, 'hg.create.none')
393 user_model.revoke_perm(usr, 'hg.fork.repository')
393 user_model.revoke_perm(usr, 'hg.fork.repository')
394 user_model.grant_perm(usr, 'hg.fork.none')
394 user_model.grant_perm(usr, 'hg.fork.none')
395
395
396 # enable global perms on specific user
396 # enable global perms on specific user
397 user_model.revoke_perm(self.u1, 'hg.create.none')
397 user_model.revoke_perm(self.u1, 'hg.create.none')
398 user_model.grant_perm(self.u1, 'hg.create.repository')
398 user_model.grant_perm(self.u1, 'hg.create.repository')
399 user_model.revoke_perm(self.u1, 'hg.fork.none')
399 user_model.revoke_perm(self.u1, 'hg.fork.none')
400 user_model.grant_perm(self.u1, 'hg.fork.repository')
400 user_model.grant_perm(self.u1, 'hg.fork.repository')
401
401
402 # make sure inherit flag is turned off
402 # make sure inherit flag is turned off
403 self.u1.inherit_default_permissions = False
403 self.u1.inherit_default_permissions = False
404 Session().commit()
404 Session().commit()
405
405
406 # TODO(marcink): check branch perms
406 # TODO(marcink): check branch perms
407
407
408 # this user will have non-inherited permissions from his
408 # this user will have non-inherited permissions from his
409 # explicitly set permissions
409 # explicitly set permissions
410 assert global_perms(self.u1) == {
410 assert global_perms(self.u1) == {
411 'hg.create.repository',
411 'hg.create.repository',
412 'hg.fork.repository',
412 'hg.fork.repository',
413 'hg.register.manual_activate',
413 'hg.register.manual_activate',
414 'hg.password_reset.enabled',
414 'hg.password_reset.enabled',
415 'hg.extern_activate.auto',
415 'hg.extern_activate.auto',
416 'repository.read',
416 'repository.read',
417 'group.read',
417 'group.read',
418 'usergroup.read',
418 'usergroup.read',
419 'branch.push_force',
419 'branch.push_force',
420 }
420 }
421
421
422 @pytest.mark.parametrize('perm, expected_perm', [
422 @pytest.mark.parametrize('perm, expected_perm', [
423 ('hg.inherit_default_perms.false', 'repository.none', ),
423 ('hg.inherit_default_perms.false', 'repository.none', ),
424 ('hg.inherit_default_perms.true', 'repository.read', ),
424 ('hg.inherit_default_perms.true', 'repository.read', ),
425 ])
425 ])
426 def test_inherited_permissions_on_objects(self, perm, expected_perm):
426 def test_inherited_permissions_on_objects(self, perm, expected_perm):
427 _form_result = {
427 _form_result = {
428 'default_inherit_default_permissions': perm,
428 'default_inherit_default_permissions': perm,
429 }
429 }
430 PermissionModel().set_new_user_perms(
430 PermissionModel().set_new_user_perms(
431 User.get_default_user(), _form_result)
431 User.get_default_user(), _form_result)
432 Session().commit()
432 Session().commit()
433
433
434 # make sure inherit flag is turned on
434 # make sure inherit flag is turned on
435 self.u1.inherit_default_permissions = True
435 self.u1.inherit_default_permissions = True
436 Session().commit()
436 Session().commit()
437
437
438 # TODO(marcink): check branch perms
438 # TODO(marcink): check branch perms
439
439
440 # this user will have inherited permissions from default user
440 # this user will have inherited permissions from default user
441 assert global_perms(self.u1) == {
441 assert global_perms(self.u1) == {
442 'hg.create.none',
442 'hg.create.none',
443 'hg.fork.none',
443 'hg.fork.none',
444 'hg.register.manual_activate',
444 'hg.register.manual_activate',
445 'hg.password_reset.enabled',
445 'hg.password_reset.enabled',
446 'hg.extern_activate.auto',
446 'hg.extern_activate.auto',
447 'repository.read',
447 'repository.read',
448 'group.read',
448 'group.read',
449 'usergroup.read',
449 'usergroup.read',
450 'branch.push_force',
450 'branch.push_force',
451 'hg.create.write_on_repogroup.true',
451 'hg.create.write_on_repogroup.true',
452 'hg.usergroup.create.false',
452 'hg.usergroup.create.false',
453 'hg.repogroup.create.false',
453 'hg.repogroup.create.false',
454 perm
454 perm
455 }
455 }
456
456
457 assert set(repo_perms(self.u1).values()) == set([expected_perm])
457 assert set(repo_perms(self.u1).values()) == set([expected_perm])
458
458
459 def test_repo_owner_permissions_not_overwritten_by_group(self):
459 def test_repo_owner_permissions_not_overwritten_by_group(self):
460 # create repo as USER,
460 # create repo as USER,
461 self.test_repo = fixture.create_repo(name='myownrepo',
461 self.test_repo = fixture.create_repo(name='myownrepo',
462 repo_type='hg',
462 repo_type='hg',
463 cur_user=self.u1)
463 cur_user=self.u1)
464
464
465 # as the owner he has admin permissions
465 # as the owner he has admin permissions
466 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
466 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
467
467
468 # set his permission via a user group, he should still be admin
468 # set his permission via a user group, he should still be admin
469 self.ug1 = fixture.create_user_group('G1')
469 self.ug1 = fixture.create_user_group('G1')
470 UserGroupModel().add_user_to_group(self.ug1, self.u1)
470 UserGroupModel().add_user_to_group(self.ug1, self.u1)
471 RepoModel().grant_user_group_permission(
471 RepoModel().grant_user_group_permission(
472 self.test_repo,
472 self.test_repo,
473 group_name=self.ug1,
473 group_name=self.ug1,
474 perm='repository.none')
474 perm='repository.none')
475 Session().commit()
475 Session().commit()
476
476
477 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
477 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
478
478
479 def test_repo_owner_permissions_not_overwritten_by_others(self):
479 def test_repo_owner_permissions_not_overwritten_by_others(self):
480 # create repo as USER,
480 # create repo as USER,
481 self.test_repo = fixture.create_repo(name='myownrepo',
481 self.test_repo = fixture.create_repo(name='myownrepo',
482 repo_type='hg',
482 repo_type='hg',
483 cur_user=self.u1)
483 cur_user=self.u1)
484
484
485 # as the owner he has admin permissions
485 # as the owner he has admin permissions
486 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
486 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
487
487
488 # set his permission as user, he should still be admin
488 # set his permission as user, he should still be admin
489 RepoModel().grant_user_permission(
489 RepoModel().grant_user_permission(
490 self.test_repo, user=self.u1, perm='repository.none')
490 self.test_repo, user=self.u1, perm='repository.none')
491 Session().commit()
491 Session().commit()
492
492
493 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
493 assert repo_perms(self.u1)['myownrepo'] == 'repository.admin'
494
494
495 def test_repo_group_owner_permissions_not_overwritten_by_group(self):
495 def test_repo_group_owner_permissions_not_overwritten_by_group(self):
496 # "u1" shall be owner without any special permission assigned
496 # "u1" shall be owner without any special permission assigned
497 self.g1 = fixture.create_repo_group('test1')
497 self.g1 = fixture.create_repo_group('test1')
498
498
499 # Make user group and grant a permission to user group
499 # Make user group and grant a permission to user group
500 self.ug1 = fixture.create_user_group('G1')
500 self.ug1 = fixture.create_user_group('G1')
501 UserGroupModel().add_user_to_group(self.ug1, self.u1)
501 UserGroupModel().add_user_to_group(self.ug1, self.u1)
502 RepoGroupModel().grant_user_group_permission(
502 RepoGroupModel().grant_user_group_permission(
503 repo_group=self.g1, group_name=self.ug1, perm='group.write')
503 repo_group=self.g1, group_name=self.ug1, perm='group.write')
504
504
505 # Verify that user does not get any special permission if he is not
505 # Verify that user does not get any special permission if he is not
506 # owner
506 # owner
507 assert group_perms(self.u1) == {'test1': 'group.write'}
507 assert group_perms(self.u1) == {'test1': 'group.write'}
508
508
509 # Make him owner of the repo group
509 # Make him owner of the repo group
510 self.g1.user = self.u1
510 self.g1.user = self.u1
511 assert group_perms(self.u1) == {'test1': 'group.admin'}
511 assert group_perms(self.u1) == {'test1': 'group.admin'}
512
512
513 def test_repo_group_owner_permissions_not_overwritten_by_others(self):
513 def test_repo_group_owner_permissions_not_overwritten_by_others(self):
514 # "u1" shall be owner without any special permission assigned
514 # "u1" shall be owner without any special permission assigned
515 self.g1 = fixture.create_repo_group('test1')
515 self.g1 = fixture.create_repo_group('test1')
516 RepoGroupModel().grant_user_permission(
516 RepoGroupModel().grant_user_permission(
517 repo_group=self.g1, user=self.u1, perm='group.write')
517 repo_group=self.g1, user=self.u1, perm='group.write')
518
518
519 # Verify that user does not get any special permission if he is not
519 # Verify that user does not get any special permission if he is not
520 # owner
520 # owner
521 assert group_perms(self.u1) == {'test1': 'group.write'}
521 assert group_perms(self.u1) == {'test1': 'group.write'}
522
522
523 # Make him owner of the repo group
523 # Make him owner of the repo group
524 self.g1.user = self.u1
524 self.g1.user = self.u1
525 assert group_perms(self.u1) == {u'test1': 'group.admin'}
525 assert group_perms(self.u1) == {u'test1': 'group.admin'}
526
526
527 def _test_def_user_perm_equal(
527 def _test_def_user_perm_equal(
528 self, user, change_factor=0, compare_keys=None):
528 self, user, change_factor=0, compare_keys=None):
529 perms = UserToPerm.query().filter(UserToPerm.user == user).all()
529 perms = UserToPerm.query().filter(UserToPerm.user == user).all()
530 assert len(perms) == \
530 assert len(perms) == \
531 len(Permission.DEFAULT_USER_PERMISSIONS) + change_factor
531 len(Permission.DEFAULT_USER_PERMISSIONS) + change_factor
532 if compare_keys:
532 if compare_keys:
533 assert set(
533 assert set(
534 x.permissions.permission_name for x in perms) == compare_keys
534 x.permissions.permission_name for x in perms) == compare_keys
535
535
536 def _test_def_user_group_perm_equal(
536 def _test_def_user_group_perm_equal(
537 self, user_group, change_factor=0, compare_keys=None):
537 self, user_group, change_factor=0, compare_keys=None):
538 perms = UserGroupToPerm.query().filter(
538 perms = UserGroupToPerm.query().filter(
539 UserGroupToPerm.users_group == user_group).all()
539 UserGroupToPerm.users_group == user_group).all()
540 assert len(perms) == \
540 assert len(perms) == \
541 len(Permission.DEFAULT_USER_PERMISSIONS) + change_factor
541 len(Permission.DEFAULT_USER_PERMISSIONS) + change_factor
542 if compare_keys:
542 if compare_keys:
543 assert set(
543 assert set(
544 x.permissions.permission_name for x in perms) == compare_keys
544 x.permissions.permission_name for x in perms) == compare_keys
545
545
546 def test_set_default_permissions(self):
546 def test_set_default_permissions(self):
547 PermissionModel().create_default_user_permissions(user=self.u1)
547 PermissionModel().create_default_user_permissions(user=self.u1)
548 self._test_def_user_perm_equal(user=self.u1)
548 self._test_def_user_perm_equal(user=self.u1)
549
549
550 def test_set_default_permissions_after_one_is_missing(self):
550 def test_set_default_permissions_after_one_is_missing(self):
551 PermissionModel().create_default_user_permissions(user=self.u1)
551 PermissionModel().create_default_user_permissions(user=self.u1)
552 self._test_def_user_perm_equal(user=self.u1)
552 self._test_def_user_perm_equal(user=self.u1)
553 # now we delete one, it should be re-created after another call
553 # now we delete one, it should be re-created after another call
554 perms = UserToPerm.query().filter(UserToPerm.user == self.u1).all()
554 perms = UserToPerm.query().filter(UserToPerm.user == self.u1).all()
555 Session().delete(perms[0])
555 Session().delete(perms[0])
556 Session().commit()
556 Session().commit()
557
557
558 self._test_def_user_perm_equal(user=self.u1, change_factor=-1)
558 self._test_def_user_perm_equal(user=self.u1, change_factor=-1)
559
559
560 # create missing one !
560 # create missing one !
561 PermissionModel().create_default_user_permissions(user=self.u1)
561 PermissionModel().create_default_user_permissions(user=self.u1)
562 self._test_def_user_perm_equal(user=self.u1)
562 self._test_def_user_perm_equal(user=self.u1)
563
563
564 @pytest.mark.parametrize("perm, modify_to", [
564 @pytest.mark.parametrize("perm, modify_to", [
565 ('repository.read', 'repository.none'),
565 ('repository.read', 'repository.none'),
566 ('group.read', 'group.none'),
566 ('group.read', 'group.none'),
567 ('usergroup.read', 'usergroup.none'),
567 ('usergroup.read', 'usergroup.none'),
568 ('hg.create.repository', 'hg.create.none'),
568 ('hg.create.repository', 'hg.create.none'),
569 ('hg.fork.repository', 'hg.fork.none'),
569 ('hg.fork.repository', 'hg.fork.none'),
570 ('hg.register.manual_activate', 'hg.register.auto_activate',)
570 ('hg.register.manual_activate', 'hg.register.auto_activate',)
571 ])
571 ])
572 def test_set_default_permissions_after_modification(self, perm, modify_to):
572 def test_set_default_permissions_after_modification(self, perm, modify_to):
573 PermissionModel().create_default_user_permissions(user=self.u1)
573 PermissionModel().create_default_user_permissions(user=self.u1)
574 self._test_def_user_perm_equal(user=self.u1)
574 self._test_def_user_perm_equal(user=self.u1)
575
575
576 old = Permission.get_by_key(perm)
576 old = Permission.get_by_key(perm)
577 new = Permission.get_by_key(modify_to)
577 new = Permission.get_by_key(modify_to)
578 assert old is not None
578 assert old is not None
579 assert new is not None
579 assert new is not None
580
580
581 # now modify permissions
581 # now modify permissions
582 p = UserToPerm.query().filter(
582 p = UserToPerm.query().filter(
583 UserToPerm.user == self.u1).filter(
583 UserToPerm.user == self.u1).filter(
584 UserToPerm.permission == old).one()
584 UserToPerm.permission == old).one()
585 p.permission = new
585 p.permission = new
586 Session().add(p)
586 Session().add(p)
587 Session().commit()
587 Session().commit()
588
588
589 PermissionModel().create_default_user_permissions(user=self.u1)
589 PermissionModel().create_default_user_permissions(user=self.u1)
590 self._test_def_user_perm_equal(user=self.u1)
590 self._test_def_user_perm_equal(user=self.u1)
591
591
592 def test_clear_user_perms(self):
592 def test_clear_user_perms(self):
593 PermissionModel().create_default_user_permissions(user=self.u1)
593 PermissionModel().create_default_user_permissions(user=self.u1)
594 self._test_def_user_perm_equal(user=self.u1)
594 self._test_def_user_perm_equal(user=self.u1)
595
595
596 # now clear permissions
596 # now clear permissions
597 cleared = PermissionModel()._clear_user_perms(self.u1.user_id)
597 cleared = PermissionModel()._clear_user_perms(self.u1.user_id)
598 self._test_def_user_perm_equal(user=self.u1,
598 self._test_def_user_perm_equal(user=self.u1,
599 change_factor=len(cleared)*-1)
599 change_factor=len(cleared)*-1)
600
600
601 def test_clear_user_group_perms(self):
601 def test_clear_user_group_perms(self):
602 self.ug1 = fixture.create_user_group('G1')
602 self.ug1 = fixture.create_user_group('G1')
603 PermissionModel().create_default_user_group_permissions(
603 PermissionModel().create_default_user_group_permissions(
604 user_group=self.ug1)
604 user_group=self.ug1)
605 self._test_def_user_group_perm_equal(user_group=self.ug1)
605 self._test_def_user_group_perm_equal(user_group=self.ug1)
606
606
607 # now clear permissions
607 # now clear permissions
608 cleared = PermissionModel()._clear_user_group_perms(
608 cleared = PermissionModel()._clear_user_group_perms(
609 self.ug1.users_group_id)
609 self.ug1.users_group_id)
610 self._test_def_user_group_perm_equal(user_group=self.ug1,
610 self._test_def_user_group_perm_equal(user_group=self.ug1,
611 change_factor=len(cleared)*-1)
611 change_factor=len(cleared)*-1)
612
612
613 @pytest.mark.parametrize("form_result", [
613 @pytest.mark.parametrize("form_result", [
614 {},
614 {},
615 {'default_repo_create': 'hg.create.repository'},
615 {'default_repo_create': 'hg.create.repository'},
616 {'default_repo_create': 'hg.create.repository',
616 {'default_repo_create': 'hg.create.repository',
617 'default_repo_perm': 'repository.read'},
617 'default_repo_perm': 'repository.read'},
618 {'default_repo_create': 'hg.create.none',
618 {'default_repo_create': 'hg.create.none',
619 'default_repo_perm': 'repository.write',
619 'default_repo_perm': 'repository.write',
620 'default_fork_create': 'hg.fork.none'},
620 'default_fork_create': 'hg.fork.none'},
621 ])
621 ])
622 def test_set_new_user_permissions(self, form_result):
622 def test_set_new_user_permissions(self, form_result):
623 _form_result = {}
623 _form_result = {}
624 _form_result.update(form_result)
624 _form_result.update(form_result)
625 PermissionModel().set_new_user_perms(self.u1, _form_result)
625 PermissionModel().set_new_user_perms(self.u1, _form_result)
626 Session().commit()
626 Session().commit()
627 change_factor = -1 * (len(Permission.DEFAULT_USER_PERMISSIONS)
627 change_factor = -1 * (len(Permission.DEFAULT_USER_PERMISSIONS)
628 - len(form_result.keys()))
628 - len(form_result.keys()))
629 self._test_def_user_perm_equal(
629 self._test_def_user_perm_equal(
630 self.u1, change_factor=change_factor)
630 self.u1, change_factor=change_factor)
631
631
632 @pytest.mark.parametrize("form_result", [
632 @pytest.mark.parametrize("form_result", [
633 {},
633 {},
634 {'default_repo_create': 'hg.create.repository'},
634 {'default_repo_create': 'hg.create.repository'},
635 {'default_repo_create': 'hg.create.repository',
635 {'default_repo_create': 'hg.create.repository',
636 'default_repo_perm': 'repository.read'},
636 'default_repo_perm': 'repository.read'},
637 {'default_repo_create': 'hg.create.none',
637 {'default_repo_create': 'hg.create.none',
638 'default_repo_perm': 'repository.write',
638 'default_repo_perm': 'repository.write',
639 'default_fork_create': 'hg.fork.none'},
639 'default_fork_create': 'hg.fork.none'},
640 ])
640 ])
641 def test_set_new_user_group_permissions(self, form_result):
641 def test_set_new_user_group_permissions(self, form_result):
642 _form_result = {}
642 _form_result = {}
643 _form_result.update(form_result)
643 _form_result.update(form_result)
644 self.ug1 = fixture.create_user_group('G1')
644 self.ug1 = fixture.create_user_group('G1')
645 PermissionModel().set_new_user_group_perms(self.ug1, _form_result)
645 PermissionModel().set_new_user_group_perms(self.ug1, _form_result)
646 Session().commit()
646 Session().commit()
647 change_factor = -1 * (len(Permission.DEFAULT_USER_PERMISSIONS)
647 change_factor = -1 * (len(Permission.DEFAULT_USER_PERMISSIONS)
648 - len(form_result.keys()))
648 - len(form_result.keys()))
649 self._test_def_user_group_perm_equal(
649 self._test_def_user_group_perm_equal(
650 self.ug1, change_factor=change_factor)
650 self.ug1, change_factor=change_factor)
651
651
652 @pytest.mark.parametrize("group_active, expected_perm", [
652 @pytest.mark.parametrize("group_active, expected_perm", [
653 (True, 'repository.admin'),
653 (True, 'repository.admin'),
654 (False, 'repository.read'),
654 (False, 'repository.read'),
655 ])
655 ])
656 def test_get_default_repo_perms_from_user_group_with_active_group(
656 def test_get_default_repo_perms_from_user_group_with_active_group(
657 self, backend, user_util, group_active, expected_perm):
657 self, backend, user_util, group_active, expected_perm):
658 repo = backend.create_repo()
658 repo = backend.create_repo()
659 user = user_util.create_user()
659 user = user_util.create_user()
660 user_group = user_util.create_user_group(
660 user_group = user_util.create_user_group(
661 members=[user], users_group_active=group_active)
661 members=[user], users_group_active=group_active)
662
662
663 user_util.grant_user_group_permission_to_repo(
663 user_util.grant_user_group_permission_to_repo(
664 repo, user_group, 'repository.admin')
664 repo, user_group, 'repository.admin')
665 permissions = repo_perms(user)
665 permissions = repo_perms(user)
666 repo_permission = permissions.get(repo.repo_name)
666 repo_permission = permissions.get(repo.repo_name)
667 assert repo_permission == expected_perm
667 assert repo_permission == expected_perm
668
668
669 @pytest.mark.parametrize("group_active, expected_perm", [
669 @pytest.mark.parametrize("group_active, expected_perm", [
670 (True, 'group.admin'),
670 (True, 'group.admin'),
671 (False, 'group.read')
671 (False, 'group.read')
672 ])
672 ])
673 def test_get_default_group_perms_from_user_group_with_active_group(
673 def test_get_default_group_perms_from_user_group_with_active_group(
674 self, user_util, group_active, expected_perm):
674 self, user_util, group_active, expected_perm):
675 user = user_util.create_user()
675 user = user_util.create_user()
676 repo_group = user_util.create_repo_group()
676 repo_group = user_util.create_repo_group()
677 user_group = user_util.create_user_group(
677 user_group = user_util.create_user_group(
678 members=[user], users_group_active=group_active)
678 members=[user], users_group_active=group_active)
679
679
680 user_util.grant_user_group_permission_to_repo_group(
680 user_util.grant_user_group_permission_to_repo_group(
681 repo_group, user_group, 'group.admin')
681 repo_group, user_group, 'group.admin')
682 permissions = group_perms(user)
682 permissions = group_perms(user)
683 group_permission = permissions.get(repo_group.name)
683 group_permission = permissions.get(repo_group.name)
684 assert group_permission == expected_perm
684 assert group_permission == expected_perm
685
685
686 @pytest.mark.parametrize("group_active, expected_perm", [
686 @pytest.mark.parametrize("group_active, expected_perm", [
687 (True, 'usergroup.admin'),
687 (True, 'usergroup.admin'),
688 (False, 'usergroup.read')
688 (False, 'usergroup.read')
689 ])
689 ])
690 def test_get_default_user_group_perms_from_user_group_with_active_group(
690 def test_get_default_user_group_perms_from_user_group_with_active_group(
691 self, user_util, group_active, expected_perm):
691 self, user_util, group_active, expected_perm):
692 user = user_util.create_user()
692 user = user_util.create_user()
693 user_group = user_util.create_user_group(
693 user_group = user_util.create_user_group(
694 members=[user], users_group_active=group_active)
694 members=[user], users_group_active=group_active)
695 target_user_group = user_util.create_user_group()
695 target_user_group = user_util.create_user_group()
696
696
697 user_util.grant_user_group_permission_to_user_group(
697 user_util.grant_user_group_permission_to_user_group(
698 target_user_group, user_group, 'usergroup.admin')
698 target_user_group, user_group, 'usergroup.admin')
699 permissions = user_group_perms(user)
699 permissions = user_group_perms(user)
700 group_permission = permissions.get(target_user_group.users_group_name)
700 group_permission = permissions.get(target_user_group.users_group_name)
701 assert group_permission == expected_perm
701 assert group_permission == expected_perm
702
702
703
703
704 def repo_perms(user):
704 def repo_perms(user):
705 auth_user = AuthUser(user_id=user.user_id)
705 auth_user = AuthUser(user_id=user.user_id)
706 return auth_user.permissions['repositories']
706 return auth_user.permissions['repositories']
707
707
708
708
709 def branch_perms(user):
709 def branch_perms(user):
710 auth_user = AuthUser(user_id=user.user_id)
710 auth_user = AuthUser(user_id=user.user_id)
711 return auth_user.permissions['repository_branches']
711 return auth_user.permissions['repository_branches']
712
712
713
713
714 def group_perms(user):
714 def group_perms(user):
715 auth_user = AuthUser(user_id=user.user_id)
715 auth_user = AuthUser(user_id=user.user_id)
716 return auth_user.permissions['repositories_groups']
716 return auth_user.permissions['repositories_groups']
717
717
718
718
719 def user_group_perms(user):
719 def user_group_perms(user):
720 auth_user = AuthUser(user_id=user.user_id)
720 auth_user = AuthUser(user_id=user.user_id)
721 return auth_user.permissions['user_groups']
721 return auth_user.permissions['user_groups']
722
722
723
723
724 def global_perms(user):
724 def global_perms(user):
725 auth_user = AuthUser(user_id=user.user_id)
725 auth_user = AuthUser(user_id=user.user_id)
726 return auth_user.permissions['global']
726 return auth_user.permissions['global']
727
727
728
728
729 def default_perms(added=None, removed=None):
729 def default_perms(added=None, removed=None):
730 expected_perms = set(Permission.DEFAULT_USER_PERMISSIONS)
730 expected_perms = set(Permission.DEFAULT_USER_PERMISSIONS)
731 if removed:
731 if removed:
732 expected_perms.difference_update(removed)
732 expected_perms.difference_update(removed)
733 if added:
733 if added:
734 expected_perms.update(added)
734 expected_perms.update(added)
735 return expected_perms
735 return expected_perms
@@ -1,949 +1,949 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture()
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiple patches.
50 A pull request combined with multiple patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
55 metadata={'exception': 'MockError'})
56 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
57 BackendClass, 'merge', return_value=merge_resp)
57 BackendClass, 'merge', return_value=merge_resp)
58 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
59 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
60
60
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
63 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 self.comment_patcher.start()
65 self.comment_patcher.start()
66 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
67 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
68 self.notification_patcher.start()
68 self.notification_patcher.start()
69 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
70 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
71 self.helper_patcher.start()
71 self.helper_patcher.start()
72
72
73 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 'trigger_pull_request_hook')
74 'trigger_pull_request_hook')
75 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
76
76
77 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
80
80
81 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
82 mergeable=True, name_suffix=u'ąć')
82 mergeable=True, name_suffix=u'ąć')
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def cleanup_pull_request():
89 def cleanup_pull_request():
90 calls = [mock.call(
90 calls = [mock.call(
91 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
92 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
93
93
94 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
95 self.merge_patcher.stop()
95 self.merge_patcher.stop()
96 self.comment_patcher.stop()
96 self.comment_patcher.stop()
97 self.notification_patcher.stop()
97 self.notification_patcher.stop()
98 self.helper_patcher.stop()
98 self.helper_patcher.stop()
99 self.hook_patcher.stop()
99 self.hook_patcher.stop()
100 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
101
101
102 return self.pull_request
102 return self.pull_request
103
103
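The fixture above assembles its mock.patch objects by hand and relies on request.addfinalizer to undo them once the test is over. A minimal, self-contained sketch of that start/finalizer pattern, assuming only plain pytest and mock (the patch target here is purely illustrative):

    import mock
    import pytest

    @pytest.fixture()
    def patched_exists(request):
        patcher = mock.patch('os.path.exists', return_value=True)
        patched = patcher.start()
        request.addfinalizer(patcher.stop)  # restore the real attribute after the test
        return patched

    def test_everything_exists(patched_exists):
        import os.path
        assert os.path.exists('/definitely/not/there')

Starting patchers inside a fixture keeps each test free of setup noise, at the cost of hiding which attributes are stubbed.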
104 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
105 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 assert isinstance(prs, list)
106 assert isinstance(prs, list)
107 assert len(prs) == 1
107 assert len(prs) == 1
108
108
109 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 assert pr_count == 1
111 assert pr_count == 1
112
112
113 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 assert isinstance(prs, list)
115 assert isinstance(prs, list)
116 assert len(prs) == 1
116 assert len(prs) == 1
117
117
118 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
119 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
120 pull_request.target_repo)
120 pull_request.target_repo)
121 assert pr_count == 1
121 assert pr_count == 1
122
122
123 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
124 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, [])],
126 pull_request.author)
126 pull_request.author)
127 prs = PullRequestModel().get_awaiting_my_review(
127 prs = PullRequestModel().get_awaiting_my_review(
128 pull_request.target_repo, user_id=pull_request.author.user_id)
128 pull_request.target_repo, user_id=pull_request.author.user_id)
129 assert isinstance(prs, list)
129 assert isinstance(prs, list)
130 assert len(prs) == 1
130 assert len(prs) == 1
131
131
132 def test_count_awaiting_my_review(self, pull_request):
132 def test_count_awaiting_my_review(self, pull_request):
133 PullRequestModel().update_reviewers(
133 PullRequestModel().update_reviewers(
134 pull_request, [(pull_request.author, ['author'], False, [])],
134 pull_request, [(pull_request.author, ['author'], False, [])],
135 pull_request.author)
135 pull_request.author)
136 pr_count = PullRequestModel().count_awaiting_my_review(
136 pr_count = PullRequestModel().count_awaiting_my_review(
137 pull_request.target_repo, user_id=pull_request.author.user_id)
137 pull_request.target_repo, user_id=pull_request.author.user_id)
138 assert pr_count == 1
138 assert pr_count == 1
139
139
140 def test_delete_calls_cleanup_merge(self, pull_request):
140 def test_delete_calls_cleanup_merge(self, pull_request):
141 repo_id = pull_request.target_repo.repo_id
141 repo_id = pull_request.target_repo.repo_id
142 PullRequestModel().delete(pull_request, pull_request.author)
142 PullRequestModel().delete(pull_request, pull_request.author)
143
143
144 self.workspace_remove_mock.assert_called_once_with(
144 self.workspace_remove_mock.assert_called_once_with(
145 repo_id, self.workspace_id)
145 repo_id, self.workspace_id)
146
146
147 def test_close_calls_cleanup_and_hook(self, pull_request):
147 def test_close_calls_cleanup_and_hook(self, pull_request):
148 PullRequestModel().close_pull_request(
148 PullRequestModel().close_pull_request(
149 pull_request, pull_request.author)
149 pull_request, pull_request.author)
150 repo_id = pull_request.target_repo.repo_id
150 repo_id = pull_request.target_repo.repo_id
151
151
152 self.workspace_remove_mock.assert_called_once_with(
152 self.workspace_remove_mock.assert_called_once_with(
153 repo_id, self.workspace_id)
153 repo_id, self.workspace_id)
154 self.hook_mock.assert_called_with(
154 self.hook_mock.assert_called_with(
155 self.pull_request, self.pull_request.author, 'close')
155 self.pull_request, self.pull_request.author, 'close')
156
156
157 def test_merge_status(self, pull_request):
157 def test_merge_status(self, pull_request):
158 self.merge_mock.return_value = MergeResponse(
158 self.merge_mock.return_value = MergeResponse(
159 True, False, None, MergeFailureReason.NONE)
159 True, False, None, MergeFailureReason.NONE)
160
160
161 assert pull_request._last_merge_source_rev is None
161 assert pull_request._last_merge_source_rev is None
162 assert pull_request._last_merge_target_rev is None
162 assert pull_request._last_merge_target_rev is None
163 assert pull_request.last_merge_status is None
163 assert pull_request.last_merge_status is None
164
164
165 status, msg = PullRequestModel().merge_status(pull_request)
165 status, msg = PullRequestModel().merge_status(pull_request)
166 assert status is True
166 assert status is True
167 assert msg == 'This pull request can be automatically merged.'
167 assert msg == 'This pull request can be automatically merged.'
168 self.merge_mock.assert_called_with(
168 self.merge_mock.assert_called_with(
169 self.repo_id, self.workspace_id,
169 self.repo_id, self.workspace_id,
170 pull_request.target_ref_parts,
170 pull_request.target_ref_parts,
171 pull_request.source_repo.scm_instance(),
171 pull_request.source_repo.scm_instance(),
172 pull_request.source_ref_parts, dry_run=True,
172 pull_request.source_ref_parts, dry_run=True,
173 use_rebase=False, close_branch=False)
173 use_rebase=False, close_branch=False)
174
174
175 assert pull_request._last_merge_source_rev == self.source_commit
175 assert pull_request._last_merge_source_rev == self.source_commit
176 assert pull_request._last_merge_target_rev == self.target_commit
176 assert pull_request._last_merge_target_rev == self.target_commit
177 assert pull_request.last_merge_status is MergeFailureReason.NONE
177 assert pull_request.last_merge_status is MergeFailureReason.NONE
178
178
179 self.merge_mock.reset_mock()
179 self.merge_mock.reset_mock()
180 status, msg = PullRequestModel().merge_status(pull_request)
180 status, msg = PullRequestModel().merge_status(pull_request)
181 assert status is True
181 assert status is True
182 assert msg == 'This pull request can be automatically merged.'
182 assert msg == 'This pull request can be automatically merged.'
183 assert self.merge_mock.called is False
183 assert self.merge_mock.called is False
184
184
185 def test_merge_status_known_failure(self, pull_request):
185 def test_merge_status_known_failure(self, pull_request):
186 self.merge_mock.return_value = MergeResponse(
186 self.merge_mock.return_value = MergeResponse(
187 False, False, None, MergeFailureReason.MERGE_FAILED)
187 False, False, None, MergeFailureReason.MERGE_FAILED)
188
188
189 assert pull_request._last_merge_source_rev is None
189 assert pull_request._last_merge_source_rev is None
190 assert pull_request._last_merge_target_rev is None
190 assert pull_request._last_merge_target_rev is None
191 assert pull_request.last_merge_status is None
191 assert pull_request.last_merge_status is None
192
192
193 status, msg = PullRequestModel().merge_status(pull_request)
193 status, msg = PullRequestModel().merge_status(pull_request)
194 assert status is False
194 assert status is False
195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
196 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
197 self.repo_id, self.workspace_id,
198 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
199 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
200 pull_request.source_ref_parts, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
201 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
202
202
203 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
205 assert (
205 assert (
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207
207
208 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
209 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
210 assert status is False
210 assert status is False
211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
212 assert self.merge_mock.called is False
212 assert self.merge_mock.called is False
213
213
214 def test_merge_status_unknown_failure(self, pull_request):
214 def test_merge_status_unknown_failure(self, pull_request):
215 self.merge_mock.return_value = MergeResponse(
215 self.merge_mock.return_value = MergeResponse(
216 False, False, None, MergeFailureReason.UNKNOWN,
216 False, False, None, MergeFailureReason.UNKNOWN,
217 metadata={'exception': 'MockError'})
217 metadata={'exception': 'MockError'})
218
218
219 assert pull_request._last_merge_source_rev is None
219 assert pull_request._last_merge_source_rev is None
220 assert pull_request._last_merge_target_rev is None
220 assert pull_request._last_merge_target_rev is None
221 assert pull_request.last_merge_status is None
221 assert pull_request.last_merge_status is None
222
222
223 status, msg = PullRequestModel().merge_status(pull_request)
223 status, msg = PullRequestModel().merge_status(pull_request)
224 assert status is False
224 assert status is False
225 assert msg == (
225 assert msg == (
226 'This pull request cannot be merged because of an unhandled exception. '
226 'This pull request cannot be merged because of an unhandled exception. '
227 'MockError')
227 'MockError')
228 self.merge_mock.assert_called_with(
228 self.merge_mock.assert_called_with(
229 self.repo_id, self.workspace_id,
229 self.repo_id, self.workspace_id,
230 pull_request.target_ref_parts,
230 pull_request.target_ref_parts,
231 pull_request.source_repo.scm_instance(),
231 pull_request.source_repo.scm_instance(),
232 pull_request.source_ref_parts, dry_run=True,
232 pull_request.source_ref_parts, dry_run=True,
233 use_rebase=False, close_branch=False)
233 use_rebase=False, close_branch=False)
234
234
235 assert pull_request._last_merge_source_rev is None
235 assert pull_request._last_merge_source_rev is None
236 assert pull_request._last_merge_target_rev is None
236 assert pull_request._last_merge_target_rev is None
237 assert pull_request.last_merge_status is None
237 assert pull_request.last_merge_status is None
238
238
239 self.merge_mock.reset_mock()
239 self.merge_mock.reset_mock()
240 status, msg = PullRequestModel().merge_status(pull_request)
240 status, msg = PullRequestModel().merge_status(pull_request)
241 assert status is False
241 assert status is False
242 assert msg == (
242 assert msg == (
243 'This pull request cannot be merged because of an unhandled exception. '
243 'This pull request cannot be merged because of an unhandled exception. '
244 'MockError')
244 'MockError')
245 assert self.merge_mock.called is True
245 assert self.merge_mock.called is True
246
246
247 def test_merge_status_when_target_is_locked(self, pull_request):
247 def test_merge_status_when_target_is_locked(self, pull_request):
248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 status, msg = PullRequestModel().merge_status(pull_request)
249 status, msg = PullRequestModel().merge_status(pull_request)
250 assert status is False
250 assert status is False
251 assert msg == (
251 assert msg == (
252 'This pull request cannot be merged because the target repository '
252 'This pull request cannot be merged because the target repository '
253 'is locked by user:1.')
253 'is locked by user:1.')
254
254
255 def test_merge_status_requirements_check_target(self, pull_request):
255 def test_merge_status_requirements_check_target(self, pull_request):
256
256
257 def has_largefiles(self, repo):
257 def has_largefiles(self, repo):
258 return repo == pull_request.source_repo
258 return repo == pull_request.source_repo
259
259
260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
261 with patcher:
261 with patcher:
262 status, msg = PullRequestModel().merge_status(pull_request)
262 status, msg = PullRequestModel().merge_status(pull_request)
263
263
264 assert status is False
264 assert status is False
265 assert msg == 'Target repository large files support is disabled.'
265 assert msg == 'Target repository large files support is disabled.'
266
266
267 def test_merge_status_requirements_check_source(self, pull_request):
267 def test_merge_status_requirements_check_source(self, pull_request):
268
268
269 def has_largefiles(self, repo):
269 def has_largefiles(self, repo):
270 return repo == pull_request.target_repo
270 return repo == pull_request.target_repo
271
271
272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
273 with patcher:
273 with patcher:
274 status, msg = PullRequestModel().merge_status(pull_request)
274 status, msg = PullRequestModel().merge_status(pull_request)
275
275
276 assert status is False
276 assert status is False
277 assert msg == 'Source repository large files support is disabled.'
277 assert msg == 'Source repository large files support is disabled.'
278
278
279 def test_merge(self, pull_request, merge_extras):
279 def test_merge(self, pull_request, merge_extras):
280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
281 merge_ref = Reference(
281 merge_ref = Reference(
282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
283 self.merge_mock.return_value = MergeResponse(
283 self.merge_mock.return_value = MergeResponse(
284 True, True, merge_ref, MergeFailureReason.NONE)
284 True, True, merge_ref, MergeFailureReason.NONE)
285
285
286 merge_extras['repository'] = pull_request.target_repo.repo_name
286 merge_extras['repository'] = pull_request.target_repo.repo_name
287 PullRequestModel().merge_repo(
287 PullRequestModel().merge_repo(
288 pull_request, pull_request.author, extras=merge_extras)
288 pull_request, pull_request.author, extras=merge_extras)
289
289
290 message = (
290 message = (
291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
292 u'\n\n {pr_title}'.format(
292 u'\n\n {pr_title}'.format(
293 pr_id=pull_request.pull_request_id,
293 pr_id=pull_request.pull_request_id,
294 source_repo=safe_unicode(
294 source_repo=safe_unicode(
295 pull_request.source_repo.scm_instance().name),
295 pull_request.source_repo.scm_instance().name),
296 source_ref_name=pull_request.source_ref_parts.name,
296 source_ref_name=pull_request.source_ref_parts.name,
297 pr_title=safe_unicode(pull_request.title)
297 pr_title=safe_unicode(pull_request.title)
298 )
298 )
299 )
299 )
300 self.merge_mock.assert_called_with(
300 self.merge_mock.assert_called_with(
301 self.repo_id, self.workspace_id,
301 self.repo_id, self.workspace_id,
302 pull_request.target_ref_parts,
302 pull_request.target_ref_parts,
303 pull_request.source_repo.scm_instance(),
303 pull_request.source_repo.scm_instance(),
304 pull_request.source_ref_parts,
304 pull_request.source_ref_parts,
305 user_name=user.short_contact, user_email=user.email, message=message,
305 user_name=user.short_contact, user_email=user.email, message=message,
306 use_rebase=False, close_branch=False
306 use_rebase=False, close_branch=False
307 )
307 )
308 self.invalidation_mock.assert_called_once_with(
308 self.invalidation_mock.assert_called_once_with(
309 pull_request.target_repo.repo_name)
309 pull_request.target_repo.repo_name)
310
310
311 self.hook_mock.assert_called_with(
311 self.hook_mock.assert_called_with(
312 self.pull_request, self.pull_request.author, 'merge')
312 self.pull_request, self.pull_request.author, 'merge')
313
313
314 pull_request = PullRequest.get(pull_request.pull_request_id)
314 pull_request = PullRequest.get(pull_request.pull_request_id)
315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
316
316
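For concreteness, the merge commit message template built in test_merge expands like this with hypothetical values (repository, branch and title below are made up):

    message = (
        u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
        u'\n\n {pr_title}'.format(
            pr_id=1,
            source_repo=u'group/target-fork',
            source_ref_name=u'feature-branch',
            pr_title=u'Add a feature'))
    assert message == (
        u'Merge pull request #1 from group/target-fork feature-branch'
        u'\n\n Add a feature')

Adjacent string literals are concatenated before .format() runs, so the placeholders in both fragments are filled in one pass.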
317 def test_merge_with_status_lock(self, pull_request, merge_extras):
317 def test_merge_with_status_lock(self, pull_request, merge_extras):
318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
319 merge_ref = Reference(
319 merge_ref = Reference(
320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 self.merge_mock.return_value = MergeResponse(
321 self.merge_mock.return_value = MergeResponse(
322 True, True, merge_ref, MergeFailureReason.NONE)
322 True, True, merge_ref, MergeFailureReason.NONE)
323
323
324 merge_extras['repository'] = pull_request.target_repo.repo_name
324 merge_extras['repository'] = pull_request.target_repo.repo_name
325
325
326 with pull_request.set_state(PullRequest.STATE_UPDATING):
326 with pull_request.set_state(PullRequest.STATE_UPDATING):
327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
328 PullRequestModel().merge_repo(
328 PullRequestModel().merge_repo(
329 pull_request, pull_request.author, extras=merge_extras)
329 pull_request, pull_request.author, extras=merge_extras)
330
330
331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
332
332
333 message = (
333 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
335 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
336 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
337 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
338 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
339 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
340 pr_title=safe_unicode(pull_request.title)
341 )
341 )
342 )
342 )
343 self.merge_mock.assert_called_with(
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
344 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
345 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
346 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
347 pull_request.source_ref_parts,
348 user_name=user.short_contact, user_email=user.email, message=message,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
349 use_rebase=False, close_branch=False
350 )
350 )
351 self.invalidation_mock.assert_called_once_with(
351 self.invalidation_mock.assert_called_once_with(
352 pull_request.target_repo.repo_name)
352 pull_request.target_repo.repo_name)
353
353
354 self.hook_mock.assert_called_with(
354 self.hook_mock.assert_called_with(
355 self.pull_request, self.pull_request.author, 'merge')
355 self.pull_request, self.pull_request.author, 'merge')
356
356
357 pull_request = PullRequest.get(pull_request.pull_request_id)
357 pull_request = PullRequest.get(pull_request.pull_request_id)
358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
359
359
360 def test_merge_failed(self, pull_request, merge_extras):
360 def test_merge_failed(self, pull_request, merge_extras):
361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
362 merge_ref = Reference(
362 merge_ref = Reference(
363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
364 self.merge_mock.return_value = MergeResponse(
364 self.merge_mock.return_value = MergeResponse(
365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
366
366
367 merge_extras['repository'] = pull_request.target_repo.repo_name
367 merge_extras['repository'] = pull_request.target_repo.repo_name
368 PullRequestModel().merge_repo(
368 PullRequestModel().merge_repo(
369 pull_request, pull_request.author, extras=merge_extras)
369 pull_request, pull_request.author, extras=merge_extras)
370
370
371 message = (
371 message = (
372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
373 u'\n\n {pr_title}'.format(
373 u'\n\n {pr_title}'.format(
374 pr_id=pull_request.pull_request_id,
374 pr_id=pull_request.pull_request_id,
375 source_repo=safe_unicode(
375 source_repo=safe_unicode(
376 pull_request.source_repo.scm_instance().name),
376 pull_request.source_repo.scm_instance().name),
377 source_ref_name=pull_request.source_ref_parts.name,
377 source_ref_name=pull_request.source_ref_parts.name,
378 pr_title=safe_unicode(pull_request.title)
378 pr_title=safe_unicode(pull_request.title)
379 )
379 )
380 )
380 )
381 self.merge_mock.assert_called_with(
381 self.merge_mock.assert_called_with(
382 self.repo_id, self.workspace_id,
382 self.repo_id, self.workspace_id,
383 pull_request.target_ref_parts,
383 pull_request.target_ref_parts,
384 pull_request.source_repo.scm_instance(),
384 pull_request.source_repo.scm_instance(),
385 pull_request.source_ref_parts,
385 pull_request.source_ref_parts,
386 user_name=user.short_contact, user_email=user.email, message=message,
386 user_name=user.short_contact, user_email=user.email, message=message,
387 use_rebase=False, close_branch=False
387 use_rebase=False, close_branch=False
388 )
388 )
389
389
390 pull_request = PullRequest.get(pull_request.pull_request_id)
390 pull_request = PullRequest.get(pull_request.pull_request_id)
391 assert self.invalidation_mock.called is False
391 assert self.invalidation_mock.called is False
392 assert pull_request.merge_rev is None
392 assert pull_request.merge_rev is None
393
393
394 def test_get_commit_ids(self, pull_request):
394 def test_get_commit_ids(self, pull_request):
395 # The PR has not been merged yet, so expect an exception
395 # The PR has not been merged yet, so expect an exception
396 with pytest.raises(ValueError):
396 with pytest.raises(ValueError):
397 PullRequestModel()._get_commit_ids(pull_request)
397 PullRequestModel()._get_commit_ids(pull_request)
398
398
399 # Merge revision is in the revisions list
399 # Merge revision is in the revisions list
400 pull_request.merge_rev = pull_request.revisions[0]
400 pull_request.merge_rev = pull_request.revisions[0]
401 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
401 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
402 assert commit_ids == pull_request.revisions
402 assert commit_ids == pull_request.revisions
403
403
404 # Merge revision is not in the revisions list
404 # Merge revision is not in the revisions list
405 pull_request.merge_rev = 'f000' * 10
405 pull_request.merge_rev = 'f000' * 10
406 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
406 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
407 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
407 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
408
408
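The three assertions above pin down the contract of _get_commit_ids: no merge revision raises, a merge revision already contained in the PR revisions adds nothing, and an external merge revision is appended. A condensed behavioural sketch, not the actual PullRequestModel code:

    def get_commit_ids(revisions, merge_rev):
        if merge_rev is None:
            raise ValueError('pull request has no merge revision yet')
        if merge_rev in revisions:
            return revisions
        return list(revisions) + [merge_rev]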
409 def test_get_diff_from_pr_version(self, pull_request):
409 def test_get_diff_from_pr_version(self, pull_request):
410 source_repo = pull_request.source_repo
410 source_repo = pull_request.source_repo
411 source_ref_id = pull_request.source_ref_parts.commit_id
411 source_ref_id = pull_request.source_ref_parts.commit_id
412 target_ref_id = pull_request.target_ref_parts.commit_id
412 target_ref_id = pull_request.target_ref_parts.commit_id
413 diff = PullRequestModel()._get_diff_from_pr_or_version(
413 diff = PullRequestModel()._get_diff_from_pr_or_version(
414 source_repo, source_ref_id, target_ref_id,
414 source_repo, source_ref_id, target_ref_id,
415 hide_whitespace_changes=False, diff_context=6)
415 hide_whitespace_changes=False, diff_context=6)
416 assert 'file_1' in diff.raw
416 assert 'file_1' in diff.raw
417
417
418 def test_generate_title_returns_unicode(self):
418 def test_generate_title_returns_unicode(self):
419 title = PullRequestModel().generate_pullrequest_title(
419 title = PullRequestModel().generate_pullrequest_title(
420 source='source-dummy',
420 source='source-dummy',
421 source_ref='source-ref-dummy',
421 source_ref='source-ref-dummy',
422 target='target-dummy',
422 target='target-dummy',
423 )
423 )
424 assert type(title) == unicode
424 assert type(title) == unicode
425
425
426
426
427 @pytest.mark.usefixtures('config_stub')
427 @pytest.mark.usefixtures('config_stub')
428 class TestIntegrationMerge(object):
428 class TestIntegrationMerge(object):
429 @pytest.mark.parametrize('extra_config', (
429 @pytest.mark.parametrize('extra_config', (
430 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
430 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
431 ))
431 ))
432 def test_merge_triggers_push_hooks(
432 def test_merge_triggers_push_hooks(
433 self, pr_util, user_admin, capture_rcextensions, merge_extras,
433 self, pr_util, user_admin, capture_rcextensions, merge_extras,
434 extra_config):
434 extra_config):
435
435
436 pull_request = pr_util.create_pull_request(
436 pull_request = pr_util.create_pull_request(
437 approved=True, mergeable=True)
437 approved=True, mergeable=True)
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 merge_extras['repository'] = pull_request.target_repo.repo_name
439 merge_extras['repository'] = pull_request.target_repo.repo_name
440 Session().commit()
440 Session().commit()
441
441
442 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
442 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
443 merge_state = PullRequestModel().merge_repo(
443 merge_state = PullRequestModel().merge_repo(
444 pull_request, user_admin, extras=merge_extras)
444 pull_request, user_admin, extras=merge_extras)
445
445
446 assert merge_state.executed
446 assert merge_state.executed
447 assert '_pre_push_hook' in capture_rcextensions
447 assert '_pre_push_hook' in capture_rcextensions
448 assert '_push_hook' in capture_rcextensions
448 assert '_push_hook' in capture_rcextensions
449
449
450 def test_merge_can_be_rejected_by_pre_push_hook(
450 def test_merge_can_be_rejected_by_pre_push_hook(
451 self, pr_util, user_admin, capture_rcextensions, merge_extras):
451 self, pr_util, user_admin, capture_rcextensions, merge_extras):
452 pull_request = pr_util.create_pull_request(
452 pull_request = pr_util.create_pull_request(
453 approved=True, mergeable=True)
453 approved=True, mergeable=True)
454 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
454 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
455 merge_extras['repository'] = pull_request.target_repo.repo_name
455 merge_extras['repository'] = pull_request.target_repo.repo_name
456 Session().commit()
456 Session().commit()
457
457
458 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
458 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
459 pre_pull.side_effect = RepositoryError("Disallow push!")
459 pre_pull.side_effect = RepositoryError("Disallow push!")
460 merge_status = PullRequestModel().merge_repo(
460 merge_status = PullRequestModel().merge_repo(
461 pull_request, user_admin, extras=merge_extras)
461 pull_request, user_admin, extras=merge_extras)
462
462
463 assert not merge_status.executed
463 assert not merge_status.executed
464 assert 'pre_push' not in capture_rcextensions
464 assert 'pre_push' not in capture_rcextensions
465 assert 'post_push' not in capture_rcextensions
465 assert 'post_push' not in capture_rcextensions
466
466
467 def test_merge_fails_if_target_is_locked(
467 def test_merge_fails_if_target_is_locked(
468 self, pr_util, user_regular, merge_extras):
468 self, pr_util, user_regular, merge_extras):
469 pull_request = pr_util.create_pull_request(
469 pull_request = pr_util.create_pull_request(
470 approved=True, mergeable=True)
470 approved=True, mergeable=True)
471 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
471 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
472 pull_request.target_repo.locked = locked_by
472 pull_request.target_repo.locked = locked_by
473 # TODO: johbo: Check if this can work based on the database, currently
473 # TODO: johbo: Check if this can work based on the database, currently
474 # all data is pre-computed, that's why just updating the DB is not
474 # all data is pre-computed, that's why just updating the DB is not
475 # enough.
475 # enough.
476 merge_extras['locked_by'] = locked_by
476 merge_extras['locked_by'] = locked_by
477 merge_extras['repository'] = pull_request.target_repo.repo_name
477 merge_extras['repository'] = pull_request.target_repo.repo_name
478 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
478 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
479 Session().commit()
479 Session().commit()
480 merge_status = PullRequestModel().merge_repo(
480 merge_status = PullRequestModel().merge_repo(
481 pull_request, user_regular, extras=merge_extras)
481 pull_request, user_regular, extras=merge_extras)
482 assert not merge_status.executed
482 assert not merge_status.executed
483
483
484
484
485 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
485 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
486 (False, 1, 0),
486 (False, 1, 0),
487 (True, 0, 1),
487 (True, 0, 1),
488 ])
488 ])
489 def test_outdated_comments(
489 def test_outdated_comments(
490 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
490 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
491 pull_request = pr_util.create_pull_request()
491 pull_request = pr_util.create_pull_request()
492 pr_util.create_inline_comment(file_path='not_in_updated_diff')
492 pr_util.create_inline_comment(file_path='not_in_updated_diff')
493
493
494 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
494 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
495 pr_util.add_one_commit()
495 pr_util.add_one_commit()
496 assert_inline_comments(
496 assert_inline_comments(
497 pull_request, visible=inlines_count, outdated=outdated_count)
497 pull_request, visible=inlines_count, outdated=outdated_count)
498 outdated_comment_mock.assert_called_with(pull_request)
498 outdated_comment_mock.assert_called_with(pull_request)
499
499
500
500
501 @pytest.mark.parametrize('mr_type, expected_msg', [
501 @pytest.mark.parametrize('mr_type, expected_msg', [
502 (MergeFailureReason.NONE,
502 (MergeFailureReason.NONE,
503 'This pull request can be automatically merged.'),
503 'This pull request can be automatically merged.'),
504 (MergeFailureReason.UNKNOWN,
504 (MergeFailureReason.UNKNOWN,
505 'This pull request cannot be merged because of an unhandled exception. CRASH'),
505 'This pull request cannot be merged because of an unhandled exception. CRASH'),
506 (MergeFailureReason.MERGE_FAILED,
506 (MergeFailureReason.MERGE_FAILED,
507 'This pull request cannot be merged because of merge conflicts.'),
507 'This pull request cannot be merged because of merge conflicts.'),
508 (MergeFailureReason.PUSH_FAILED,
508 (MergeFailureReason.PUSH_FAILED,
509 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
509 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
510 (MergeFailureReason.TARGET_IS_NOT_HEAD,
510 (MergeFailureReason.TARGET_IS_NOT_HEAD,
511 'This pull request cannot be merged because the target `ref_name` is not a head.'),
511 'This pull request cannot be merged because the target `ref_name` is not a head.'),
512 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
512 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
513 'This pull request cannot be merged because the source contains more branches than the target.'),
513 'This pull request cannot be merged because the source contains more branches than the target.'),
514 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
514 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
515 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
515 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
516 (MergeFailureReason.TARGET_IS_LOCKED,
516 (MergeFailureReason.TARGET_IS_LOCKED,
517 'This pull request cannot be merged because the target repository is locked by user:123.'),
517 'This pull request cannot be merged because the target repository is locked by user:123.'),
518 (MergeFailureReason.MISSING_TARGET_REF,
518 (MergeFailureReason.MISSING_TARGET_REF,
519 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
519 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
520 (MergeFailureReason.MISSING_SOURCE_REF,
520 (MergeFailureReason.MISSING_SOURCE_REF,
521 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
521 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
522 (MergeFailureReason.SUBREPO_MERGE_FAILED,
522 (MergeFailureReason.SUBREPO_MERGE_FAILED,
523 'This pull request cannot be merged because of conflicts related to sub repositories.'),
523 'This pull request cannot be merged because of conflicts related to sub repositories.'),
524
524
525 ])
525 ])
526 def test_merge_response_message(mr_type, expected_msg):
526 def test_merge_response_message(mr_type, expected_msg):
527 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
527 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
528 metadata = {
528 metadata = {
529 'exception': "CRASH",
529 'exception': "CRASH",
530 'target': 'some-repo',
530 'target': 'some-repo',
531 'merge_commit': 'merge_commit',
531 'merge_commit': 'merge_commit',
532 'target_ref': merge_ref,
532 'target_ref': merge_ref,
533 'source_ref': merge_ref,
533 'source_ref': merge_ref,
534 'heads': ','.join(['a', 'b', 'c']),
534 'heads': ','.join(['a', 'b', 'c']),
535 'locked_by': 'user:123'}
535 'locked_by': 'user:123'}
536
536
537 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
537 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
538 assert merge_response.merge_status_message == expected_msg
538 assert merge_response.merge_status_message == expected_msg
539
539
540
540
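The parametrized cases above effectively document how MergeResponse combines a failure reason with its metadata to produce a user-facing message. A rough sketch of that substitution with an assumed template mapping (the real templates live on MergeResponse in rhodecode.lib.vcs.backends.base):

    # Illustrative only; keys and wording mirror the expectations above.
    MESSAGES = {
        'MERGE_FAILED':
            'This pull request cannot be merged because of merge conflicts.',
        'TARGET_IS_LOCKED':
            'This pull request cannot be merged because the target repository '
            'is locked by {locked_by}.',
    }

    def merge_status_message(reason, metadata):
        return MESSAGES[reason].format(**metadata)

    assert merge_status_message('TARGET_IS_LOCKED', {'locked_by': 'user:123'}) == (
        'This pull request cannot be merged because the target repository '
        'is locked by user:123.')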
541 @pytest.fixture
541 @pytest.fixture()
542 def merge_extras(user_regular):
542 def merge_extras(user_regular):
543 """
543 """
544 Context for the vcs operation when running a merge.
544 Context for the vcs operation when running a merge.
545 """
545 """
546 extras = {
546 extras = {
547 'ip': '127.0.0.1',
547 'ip': '127.0.0.1',
548 'username': user_regular.username,
548 'username': user_regular.username,
549 'user_id': user_regular.user_id,
549 'user_id': user_regular.user_id,
550 'action': 'push',
550 'action': 'push',
551 'repository': 'fake_target_repo_name',
551 'repository': 'fake_target_repo_name',
552 'scm': 'git',
552 'scm': 'git',
553 'config': 'fake_config_ini_path',
553 'config': 'fake_config_ini_path',
554 'repo_store': '',
554 'repo_store': '',
555 'make_lock': None,
555 'make_lock': None,
556 'locked_by': [None, None, None],
556 'locked_by': [None, None, None],
557 'server_url': 'http://test.example.com:5000',
557 'server_url': 'http://test.example.com:5000',
558 'hooks': ['push', 'pull'],
558 'hooks': ['push', 'pull'],
559 'is_shadow_repo': False,
559 'is_shadow_repo': False,
560 }
560 }
561 return extras
561 return extras
562
562
563
563
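merge_extras is one of the fixtures switched to the called decorator form in this diff. With no arguments the two spellings behave identically; a tiny standalone illustration (fixture name and values are invented):

    import pytest

    @pytest.fixture()          # equivalent to the bare @pytest.fixture
    def sample_extras():
        return {'ip': '127.0.0.1', 'action': 'push'}

    def test_sample_extras(sample_extras):
        assert sample_extras['action'] == 'push'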
564 @pytest.mark.usefixtures('config_stub')
564 @pytest.mark.usefixtures('config_stub')
565 class TestUpdateCommentHandling(object):
565 class TestUpdateCommentHandling(object):
566
566
567 @pytest.fixture(autouse=True, scope='class')
567 @pytest.fixture(autouse=True, scope='class')
568 def enable_outdated_comments(self, request, baseapp):
568 def enable_outdated_comments(self, request, baseapp):
569 config_patch = mock.patch.dict(
569 config_patch = mock.patch.dict(
570 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
570 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
571 config_patch.start()
571 config_patch.start()
572
572
573 @request.addfinalizer
573 @request.addfinalizer
574 def cleanup():
574 def cleanup():
575 config_patch.stop()
575 config_patch.stop()
576
576
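The class-scoped fixture above enables a single configuration flag through mock.patch.dict and removes it again in the finalizer. The same mechanism in miniature, on a throwaway dict:

    import mock

    CONFIG = {'existing_key': 1}

    # patch.dict adds or overrides keys only while the context is active
    # and restores the original mapping on exit.
    with mock.patch.dict(CONFIG, {'rhodecode_use_outdated_comments': True}):
        assert CONFIG['rhodecode_use_outdated_comments'] is True
    assert 'rhodecode_use_outdated_comments' not in CONFIG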
577 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
577 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
578 commits = [
578 commits = [
579 {'message': 'a'},
579 {'message': 'a'},
580 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
580 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
581 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
581 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
582 ]
582 ]
583 pull_request = pr_util.create_pull_request(
583 pull_request = pr_util.create_pull_request(
584 commits=commits, target_head='a', source_head='b', revisions=['b'])
584 commits=commits, target_head='a', source_head='b', revisions=['b'])
585 pr_util.create_inline_comment(file_path='file_b')
585 pr_util.create_inline_comment(file_path='file_b')
586 pr_util.add_one_commit(head='c')
586 pr_util.add_one_commit(head='c')
587
587
588 assert_inline_comments(pull_request, visible=1, outdated=0)
588 assert_inline_comments(pull_request, visible=1, outdated=0)
589
589
590 def test_comment_stays_unflagged_on_change_above(self, pr_util):
590 def test_comment_stays_unflagged_on_change_above(self, pr_util):
591 original_content = ''.join(
591 original_content = ''.join(
592 ['line {}\n'.format(x) for x in range(1, 11)])
592 ['line {}\n'.format(x) for x in range(1, 11)])
593 updated_content = 'new_line_at_top\n' + original_content
593 updated_content = 'new_line_at_top\n' + original_content
594 commits = [
594 commits = [
595 {'message': 'a'},
595 {'message': 'a'},
596 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
596 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
597 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
597 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
598 ]
598 ]
599 pull_request = pr_util.create_pull_request(
599 pull_request = pr_util.create_pull_request(
600 commits=commits, target_head='a', source_head='b', revisions=['b'])
600 commits=commits, target_head='a', source_head='b', revisions=['b'])
601
601
602 with outdated_comments_patcher():
602 with outdated_comments_patcher():
603 comment = pr_util.create_inline_comment(
603 comment = pr_util.create_inline_comment(
604 line_no=u'n8', file_path='file_b')
604 line_no=u'n8', file_path='file_b')
605 pr_util.add_one_commit(head='c')
605 pr_util.add_one_commit(head='c')
606
606
607 assert_inline_comments(pull_request, visible=1, outdated=0)
607 assert_inline_comments(pull_request, visible=1, outdated=0)
608 assert comment.line_no == u'n9'
608 assert comment.line_no == u'n9'
609
609
610 def test_comment_stays_unflagged_on_change_below(self, pr_util):
610 def test_comment_stays_unflagged_on_change_below(self, pr_util):
611 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
611 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
612 updated_content = original_content + 'new_line_at_end\n'
612 updated_content = original_content + 'new_line_at_end\n'
613 commits = [
613 commits = [
614 {'message': 'a'},
614 {'message': 'a'},
615 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
615 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
616 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
616 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
617 ]
617 ]
618 pull_request = pr_util.create_pull_request(
618 pull_request = pr_util.create_pull_request(
619 commits=commits, target_head='a', source_head='b', revisions=['b'])
619 commits=commits, target_head='a', source_head='b', revisions=['b'])
620 pr_util.create_inline_comment(file_path='file_b')
620 pr_util.create_inline_comment(file_path='file_b')
621 pr_util.add_one_commit(head='c')
621 pr_util.add_one_commit(head='c')
622
622
623 assert_inline_comments(pull_request, visible=1, outdated=0)
623 assert_inline_comments(pull_request, visible=1, outdated=0)
624
624
625 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
625 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
626 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
626 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
627 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
627 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
628 change_lines = list(base_lines)
628 change_lines = list(base_lines)
629 change_lines.insert(6, 'line 6a added\n')
629 change_lines.insert(6, 'line 6a added\n')
630
630
631 # Changes on the last line of sight
631 # Changes on the last line of sight
632 update_lines = list(change_lines)
632 update_lines = list(change_lines)
633 update_lines[0] = 'line 1 changed\n'
633 update_lines[0] = 'line 1 changed\n'
634 update_lines[-1] = 'line 12 changed\n'
634 update_lines[-1] = 'line 12 changed\n'
635
635
636 def file_b(lines):
636 def file_b(lines):
637 return FileNode('file_b', ''.join(lines))
637 return FileNode('file_b', ''.join(lines))
638
638
639 commits = [
639 commits = [
640 {'message': 'a', 'added': [file_b(base_lines)]},
640 {'message': 'a', 'added': [file_b(base_lines)]},
641 {'message': 'b', 'changed': [file_b(change_lines)]},
641 {'message': 'b', 'changed': [file_b(change_lines)]},
642 {'message': 'c', 'changed': [file_b(update_lines)]},
642 {'message': 'c', 'changed': [file_b(update_lines)]},
643 ]
643 ]
644
644
645 pull_request = pr_util.create_pull_request(
645 pull_request = pr_util.create_pull_request(
646 commits=commits, target_head='a', source_head='b', revisions=['b'])
646 commits=commits, target_head='a', source_head='b', revisions=['b'])
647 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
647 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
648
648
649 with outdated_comments_patcher():
649 with outdated_comments_patcher():
650 pr_util.add_one_commit(head='c')
650 pr_util.add_one_commit(head='c')
651 assert_inline_comments(pull_request, visible=0, outdated=1)
651 assert_inline_comments(pull_request, visible=0, outdated=1)
652
652
653 @pytest.mark.parametrize("change, content", [
653 @pytest.mark.parametrize("change, content", [
654 ('changed', 'changed\n'),
654 ('changed', 'changed\n'),
655 ('removed', ''),
655 ('removed', ''),
656 ], ids=['changed', 'removed'])
656 ], ids=['changed', 'removed'])
657 def test_comment_flagged_on_change(self, pr_util, change, content):
657 def test_comment_flagged_on_change(self, pr_util, change, content):
658 commits = [
658 commits = [
659 {'message': 'a'},
659 {'message': 'a'},
660 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
660 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
661 {'message': 'c', change: [FileNode('file_b', content)]},
661 {'message': 'c', change: [FileNode('file_b', content)]},
662 ]
662 ]
663 pull_request = pr_util.create_pull_request(
663 pull_request = pr_util.create_pull_request(
664 commits=commits, target_head='a', source_head='b', revisions=['b'])
664 commits=commits, target_head='a', source_head='b', revisions=['b'])
665 pr_util.create_inline_comment(file_path='file_b')
665 pr_util.create_inline_comment(file_path='file_b')
666
666
667 with outdated_comments_patcher():
667 with outdated_comments_patcher():
668 pr_util.add_one_commit(head='c')
668 pr_util.add_one_commit(head='c')
669 assert_inline_comments(pull_request, visible=0, outdated=1)
669 assert_inline_comments(pull_request, visible=0, outdated=1)
670
670
671
671
672 @pytest.mark.usefixtures('config_stub')
672 @pytest.mark.usefixtures('config_stub')
673 class TestUpdateChangedFiles(object):
673 class TestUpdateChangedFiles(object):
674
674
675 def test_no_changes_on_unchanged_diff(self, pr_util):
675 def test_no_changes_on_unchanged_diff(self, pr_util):
676 commits = [
676 commits = [
677 {'message': 'a'},
677 {'message': 'a'},
678 {'message': 'b',
678 {'message': 'b',
679 'added': [FileNode('file_b', 'test_content b\n')]},
679 'added': [FileNode('file_b', 'test_content b\n')]},
680 {'message': 'c',
680 {'message': 'c',
681 'added': [FileNode('file_c', 'test_content c\n')]},
681 'added': [FileNode('file_c', 'test_content c\n')]},
682 ]
682 ]
683 # open a PR from a to b, adding file_b
683 # open a PR from a to b, adding file_b
684 pull_request = pr_util.create_pull_request(
684 pull_request = pr_util.create_pull_request(
685 commits=commits, target_head='a', source_head='b', revisions=['b'],
685 commits=commits, target_head='a', source_head='b', revisions=['b'],
686 name_suffix='per-file-review')
686 name_suffix='per-file-review')
687
687
688 # modify PR adding new file file_c
688 # modify PR adding new file file_c
689 pr_util.add_one_commit(head='c')
689 pr_util.add_one_commit(head='c')
690
690
691 assert_pr_file_changes(
691 assert_pr_file_changes(
692 pull_request,
692 pull_request,
693 added=['file_c'],
693 added=['file_c'],
694 modified=[],
694 modified=[],
695 removed=[])
695 removed=[])
696
696
697 def test_modify_and_undo_modification_diff(self, pr_util):
697 def test_modify_and_undo_modification_diff(self, pr_util):
698 commits = [
698 commits = [
699 {'message': 'a'},
699 {'message': 'a'},
700 {'message': 'b',
700 {'message': 'b',
701 'added': [FileNode('file_b', 'test_content b\n')]},
701 'added': [FileNode('file_b', 'test_content b\n')]},
702 {'message': 'c',
702 {'message': 'c',
703 'changed': [FileNode('file_b', 'test_content b modified\n')]},
703 'changed': [FileNode('file_b', 'test_content b modified\n')]},
704 {'message': 'd',
704 {'message': 'd',
705 'changed': [FileNode('file_b', 'test_content b\n')]},
705 'changed': [FileNode('file_b', 'test_content b\n')]},
706 ]
706 ]
707 # open a PR from a to b, adding file_b
707 # open a PR from a to b, adding file_b
708 pull_request = pr_util.create_pull_request(
708 pull_request = pr_util.create_pull_request(
709 commits=commits, target_head='a', source_head='b', revisions=['b'],
709 commits=commits, target_head='a', source_head='b', revisions=['b'],
710 name_suffix='per-file-review')
710 name_suffix='per-file-review')
711
711
712 # modify PR modifying file file_b
712 # modify PR modifying file file_b
713 pr_util.add_one_commit(head='c')
713 pr_util.add_one_commit(head='c')
714
714
715 assert_pr_file_changes(
715 assert_pr_file_changes(
716 pull_request,
716 pull_request,
717 added=[],
717 added=[],
718 modified=['file_b'],
718 modified=['file_b'],
719 removed=[])
719 removed=[])
720
720
721 # move the head again to d, which rolls back the change,
721 # move the head again to d, which rolls back the change,
722 # meaning we should indicate no changes
722 # meaning we should indicate no changes
723 pr_util.add_one_commit(head='d')
723 pr_util.add_one_commit(head='d')
724
724
725 assert_pr_file_changes(
725 assert_pr_file_changes(
726 pull_request,
726 pull_request,
727 added=[],
727 added=[],
728 modified=[],
728 modified=[],
729 removed=[])
729 removed=[])
730
730
731 def test_updated_all_files_in_pr(self, pr_util):
731 def test_updated_all_files_in_pr(self, pr_util):
732 commits = [
732 commits = [
733 {'message': 'a'},
733 {'message': 'a'},
734 {'message': 'b', 'added': [
734 {'message': 'b', 'added': [
735 FileNode('file_a', 'test_content a\n'),
735 FileNode('file_a', 'test_content a\n'),
736 FileNode('file_b', 'test_content b\n'),
736 FileNode('file_b', 'test_content b\n'),
737 FileNode('file_c', 'test_content c\n')]},
737 FileNode('file_c', 'test_content c\n')]},
738 {'message': 'c', 'changed': [
738 {'message': 'c', 'changed': [
739 FileNode('file_a', 'test_content a changed\n'),
739 FileNode('file_a', 'test_content a changed\n'),
740 FileNode('file_b', 'test_content b changed\n'),
740 FileNode('file_b', 'test_content b changed\n'),
741 FileNode('file_c', 'test_content c changed\n')]},
741 FileNode('file_c', 'test_content c changed\n')]},
742 ]
742 ]
743 # open a PR from a to b, changing 3 files
743 # open a PR from a to b, changing 3 files
744 pull_request = pr_util.create_pull_request(
744 pull_request = pr_util.create_pull_request(
745 commits=commits, target_head='a', source_head='b', revisions=['b'],
745 commits=commits, target_head='a', source_head='b', revisions=['b'],
746 name_suffix='per-file-review')
746 name_suffix='per-file-review')
747
747
748 pr_util.add_one_commit(head='c')
748 pr_util.add_one_commit(head='c')
749
749
750 assert_pr_file_changes(
750 assert_pr_file_changes(
751 pull_request,
751 pull_request,
752 added=[],
752 added=[],
753 modified=['file_a', 'file_b', 'file_c'],
753 modified=['file_a', 'file_b', 'file_c'],
754 removed=[])
754 removed=[])
755
755
756 def test_updated_and_removed_all_files_in_pr(self, pr_util):
756 def test_updated_and_removed_all_files_in_pr(self, pr_util):
757 commits = [
757 commits = [
758 {'message': 'a'},
758 {'message': 'a'},
759 {'message': 'b', 'added': [
759 {'message': 'b', 'added': [
760 FileNode('file_a', 'test_content a\n'),
760 FileNode('file_a', 'test_content a\n'),
761 FileNode('file_b', 'test_content b\n'),
761 FileNode('file_b', 'test_content b\n'),
762 FileNode('file_c', 'test_content c\n')]},
762 FileNode('file_c', 'test_content c\n')]},
763 {'message': 'c', 'removed': [
763 {'message': 'c', 'removed': [
764 FileNode('file_a', 'test_content a changed\n'),
764 FileNode('file_a', 'test_content a changed\n'),
765 FileNode('file_b', 'test_content b changed\n'),
765 FileNode('file_b', 'test_content b changed\n'),
766 FileNode('file_c', 'test_content c changed\n')]},
766 FileNode('file_c', 'test_content c changed\n')]},
767 ]
767 ]
768 # open a PR from a to b, removing 3 files
768 # open a PR from a to b, removing 3 files
769 pull_request = pr_util.create_pull_request(
769 pull_request = pr_util.create_pull_request(
770 commits=commits, target_head='a', source_head='b', revisions=['b'],
770 commits=commits, target_head='a', source_head='b', revisions=['b'],
771 name_suffix='per-file-review')
771 name_suffix='per-file-review')
772
772
773 pr_util.add_one_commit(head='c')
773 pr_util.add_one_commit(head='c')
774
774
775 assert_pr_file_changes(
775 assert_pr_file_changes(
776 pull_request,
776 pull_request,
777 added=[],
777 added=[],
778 modified=[],
778 modified=[],
779 removed=['file_a', 'file_b', 'file_c'])
779 removed=['file_a', 'file_b', 'file_c'])
780
780
781
781
782 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
782 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
783 model = PullRequestModel()
783 model = PullRequestModel()
784 pull_request = pr_util.create_pull_request()
784 pull_request = pr_util.create_pull_request()
785 pr_util.update_source_repository()
785 pr_util.update_source_repository()
786
786
787 model.update_commits(pull_request)
787 model.update_commits(pull_request)
788
788
789 # Expect that it has a version entry now
789 # Expect that it has a version entry now
790 assert len(model.get_versions(pull_request)) == 1
790 assert len(model.get_versions(pull_request)) == 1
791
791
792
792
793 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
793 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
794 pull_request = pr_util.create_pull_request()
794 pull_request = pr_util.create_pull_request()
795 model = PullRequestModel()
795 model = PullRequestModel()
796 model.update_commits(pull_request)
796 model.update_commits(pull_request)
797
797
798 # Expect that it still has no versions
798 # Expect that it still has no versions
799 assert len(model.get_versions(pull_request)) == 0
799 assert len(model.get_versions(pull_request)) == 0
800
800
801
801
802 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
802 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
803 model = PullRequestModel()
803 model = PullRequestModel()
804 pull_request = pr_util.create_pull_request()
804 pull_request = pr_util.create_pull_request()
805 comment = pr_util.create_comment()
805 comment = pr_util.create_comment()
806 pr_util.update_source_repository()
806 pr_util.update_source_repository()
807
807
808 model.update_commits(pull_request)
808 model.update_commits(pull_request)
809
809
810 # Expect that the comment is linked to the pr version now
810 # Expect that the comment is linked to the pr version now
811 assert comment.pull_request_version == model.get_versions(pull_request)[0]
811 assert comment.pull_request_version == model.get_versions(pull_request)[0]
812
812
813
813
814 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
814 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
815 model = PullRequestModel()
815 model = PullRequestModel()
816 pull_request = pr_util.create_pull_request()
816 pull_request = pr_util.create_pull_request()
817 pr_util.update_source_repository()
817 pr_util.update_source_repository()
818 pr_util.update_source_repository()
818 pr_util.update_source_repository()
819
819
820 model.update_commits(pull_request)
820 model.update_commits(pull_request)
821
821
822 # Expect to find a new comment about the change
822 # Expect to find a new comment about the change
823 expected_message = textwrap.dedent(
823 expected_message = textwrap.dedent(
824 """\
824 """\
825 Pull request updated. Auto status change to |under_review|
825 Pull request updated. Auto status change to |under_review|
826
826
827 .. role:: added
827 .. role:: added
828 .. role:: removed
828 .. role:: removed
829 .. parsed-literal::
829 .. parsed-literal::
830
830
831 Changed commits:
831 Changed commits:
832 * :added:`1 added`
832 * :added:`1 added`
833 * :removed:`0 removed`
833 * :removed:`0 removed`
834
834
835 Changed files:
835 Changed files:
836 * `A file_2 <#a_c--92ed3b5f07b4>`_
836 * `A file_2 <#a_c--92ed3b5f07b4>`_
837
837
838 .. |under_review| replace:: *"Under Review"*"""
838 .. |under_review| replace:: *"Under Review"*"""
839 )
839 )
840 pull_request_comments = sorted(
840 pull_request_comments = sorted(
841 pull_request.comments, key=lambda c: c.modified_at)
841 pull_request.comments, key=lambda c: c.modified_at)
842 update_comment = pull_request_comments[-1]
842 update_comment = pull_request_comments[-1]
843 assert update_comment.text == expected_message
843 assert update_comment.text == expected_message
844
844
845
845
846 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
846 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
847 pull_request = pr_util.create_pull_request()
847 pull_request = pr_util.create_pull_request()
848
848
849 # Avoiding default values
849 # Avoiding default values
850 pull_request.status = PullRequest.STATUS_CLOSED
850 pull_request.status = PullRequest.STATUS_CLOSED
851 pull_request._last_merge_source_rev = "0" * 40
851 pull_request._last_merge_source_rev = "0" * 40
852 pull_request._last_merge_target_rev = "1" * 40
852 pull_request._last_merge_target_rev = "1" * 40
853 pull_request.last_merge_status = 1
853 pull_request.last_merge_status = 1
854 pull_request.merge_rev = "2" * 40
854 pull_request.merge_rev = "2" * 40
855
855
856 # Remember automatic values
856 # Remember automatic values
857 created_on = pull_request.created_on
857 created_on = pull_request.created_on
858 updated_on = pull_request.updated_on
858 updated_on = pull_request.updated_on
859
859
860 # Create a new version of the pull request
860 # Create a new version of the pull request
861 version = PullRequestModel()._create_version_from_snapshot(pull_request)
861 version = PullRequestModel()._create_version_from_snapshot(pull_request)
862
862
863 # Check attributes
863 # Check attributes
864 assert version.title == pr_util.create_parameters['title']
864 assert version.title == pr_util.create_parameters['title']
865 assert version.description == pr_util.create_parameters['description']
865 assert version.description == pr_util.create_parameters['description']
866 assert version.status == PullRequest.STATUS_CLOSED
866 assert version.status == PullRequest.STATUS_CLOSED
867
867
868 # versions get an updated created_on timestamp
868 # versions get an updated created_on timestamp
869 assert version.created_on != created_on
869 assert version.created_on != created_on
870
870
871 assert version.updated_on == updated_on
871 assert version.updated_on == updated_on
872 assert version.user_id == pull_request.user_id
872 assert version.user_id == pull_request.user_id
873 assert version.revisions == pr_util.create_parameters['revisions']
873 assert version.revisions == pr_util.create_parameters['revisions']
874 assert version.source_repo == pr_util.source_repository
874 assert version.source_repo == pr_util.source_repository
875 assert version.source_ref == pr_util.create_parameters['source_ref']
875 assert version.source_ref == pr_util.create_parameters['source_ref']
876 assert version.target_repo == pr_util.target_repository
876 assert version.target_repo == pr_util.target_repository
877 assert version.target_ref == pr_util.create_parameters['target_ref']
877 assert version.target_ref == pr_util.create_parameters['target_ref']
878 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
878 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
879 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
879 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
880 assert version.last_merge_status == pull_request.last_merge_status
880 assert version.last_merge_status == pull_request.last_merge_status
881 assert version.merge_rev == pull_request.merge_rev
881 assert version.merge_rev == pull_request.merge_rev
882 assert version.pull_request == pull_request
882 assert version.pull_request == pull_request
883
883
884
884
885 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
885 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
886 version1 = pr_util.create_version_of_pull_request()
886 version1 = pr_util.create_version_of_pull_request()
887 comment_linked = pr_util.create_comment(linked_to=version1)
887 comment_linked = pr_util.create_comment(linked_to=version1)
888 comment_unlinked = pr_util.create_comment()
888 comment_unlinked = pr_util.create_comment()
889 version2 = pr_util.create_version_of_pull_request()
889 version2 = pr_util.create_version_of_pull_request()
890
890
891 PullRequestModel()._link_comments_to_version(version2)
891 PullRequestModel()._link_comments_to_version(version2)
892
892
893 # Expect that only the new comment is linked to version2
893 # Expect that only the new comment is linked to version2
894 assert (
894 assert (
895 comment_unlinked.pull_request_version_id ==
895 comment_unlinked.pull_request_version_id ==
896 version2.pull_request_version_id)
896 version2.pull_request_version_id)
897 assert (
897 assert (
898 comment_linked.pull_request_version_id ==
898 comment_linked.pull_request_version_id ==
899 version1.pull_request_version_id)
899 version1.pull_request_version_id)
900 assert (
900 assert (
901 comment_unlinked.pull_request_version_id !=
901 comment_unlinked.pull_request_version_id !=
902 comment_linked.pull_request_version_id)
902 comment_linked.pull_request_version_id)
903
903
904
904
905 def test_calculate_commits():
905 def test_calculate_commits():
906 old_ids = [1, 2, 3]
906 old_ids = [1, 2, 3]
907 new_ids = [1, 3, 4, 5]
907 new_ids = [1, 3, 4, 5]
908 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
908 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
909 assert change.added == [4, 5]
909 assert change.added == [4, 5]
910 assert change.common == [1, 3]
910 assert change.common == [1, 3]
911 assert change.removed == [2]
911 assert change.removed == [2]
912 assert change.total == [1, 3, 4, 5]
912 assert change.total == [1, 3, 4, 5]
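
# In this example the change object behaves like ordered set arithmetic on the
# id lists: 'added' holds ids present only in new_ids, 'removed' ids present
# only in old_ids, 'common' their intersection, and 'total' matches new_ids.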
913
913
914
914
915 def assert_inline_comments(pull_request, visible=None, outdated=None):
915 def assert_inline_comments(pull_request, visible=None, outdated=None):
916 if visible is not None:
916 if visible is not None:
917 inline_comments = CommentsModel().get_inline_comments(
917 inline_comments = CommentsModel().get_inline_comments(
918 pull_request.target_repo.repo_id, pull_request=pull_request)
918 pull_request.target_repo.repo_id, pull_request=pull_request)
919 inline_cnt = CommentsModel().get_inline_comments_count(
919 inline_cnt = CommentsModel().get_inline_comments_count(
920 inline_comments)
920 inline_comments)
921 assert inline_cnt == visible
921 assert inline_cnt == visible
922 if outdated is not None:
922 if outdated is not None:
923 outdated_comments = CommentsModel().get_outdated_comments(
923 outdated_comments = CommentsModel().get_outdated_comments(
924 pull_request.target_repo.repo_id, pull_request)
924 pull_request.target_repo.repo_id, pull_request)
925 assert len(outdated_comments) == outdated
925 assert len(outdated_comments) == outdated
926
926
927
927
928 def assert_pr_file_changes(
928 def assert_pr_file_changes(
929 pull_request, added=None, modified=None, removed=None):
929 pull_request, added=None, modified=None, removed=None):
930 pr_versions = PullRequestModel().get_versions(pull_request)
930 pr_versions = PullRequestModel().get_versions(pull_request)
931 # always use the first version, i.e. the original PR, to calculate changes
931 # always use the first version, i.e. the original PR, to calculate changes
932 pull_request_version = pr_versions[0]
932 pull_request_version = pr_versions[0]
933 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
933 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
934 pull_request, pull_request_version)
934 pull_request, pull_request_version)
935 file_changes = PullRequestModel()._calculate_file_changes(
935 file_changes = PullRequestModel()._calculate_file_changes(
936 old_diff_data, new_diff_data)
936 old_diff_data, new_diff_data)
937
937
938 assert added == file_changes.added, \
938 assert added == file_changes.added, \
939 'expected added:%s vs value:%s' % (added, file_changes.added)
939 'expected added:%s vs value:%s' % (added, file_changes.added)
940 assert modified == file_changes.modified, \
940 assert modified == file_changes.modified, \
941 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
941 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
942 assert removed == file_changes.removed, \
942 assert removed == file_changes.removed, \
943 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
943 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
944
944
945
945
946 def outdated_comments_patcher(use_outdated=True):
946 def outdated_comments_patcher(use_outdated=True):
947 return mock.patch.object(
947 return mock.patch.object(
948 CommentsModel, 'use_outdated_comments',
948 CommentsModel, 'use_outdated_comments',
949 return_value=use_outdated)
949 return_value=use_outdated)
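
# A minimal usage sketch of the helper above (illustrative only; it assumes the
# same pr_util/config_stub fixtures used by the other module-level tests):
#
#   def test_outdated_comments_can_be_disabled(pr_util, config_stub):
#       pull_request = pr_util.create_pull_request()
#       with outdated_comments_patcher(use_outdated=False):
#           # the patched method now simply returns False
#           assert CommentsModel.use_outdated_comments(pull_request) is False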
@@ -1,106 +1,106 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.vcs import nodes
23 from rhodecode.lib.vcs import nodes
24 from rhodecode.model.repo import ReadmeFinder
24 from rhodecode.model.repo import ReadmeFinder
25
25
26
26
27 @pytest.fixture
27 @pytest.fixture()
28 def commit_util(vcsbackend_stub):
28 def commit_util(vcsbackend_stub):
29 """
29 """
30 Provide a commit which has certain files in its tree.
30 Provide a commit which has certain files in its tree.
31
31
32 This is based on the fixture "vcsbackend" and will automatically be
32 This is based on the fixture "vcsbackend" and will automatically be
33 parametrized for all vcs backends.
33 parametrized for all vcs backends.
34 """
34 """
35 return CommitUtility(vcsbackend_stub)
35 return CommitUtility(vcsbackend_stub)
36
36
37
37
38 class CommitUtility:
38 class CommitUtility:
39
39
40 def __init__(self, vcsbackend):
40 def __init__(self, vcsbackend):
41 self.vcsbackend = vcsbackend
41 self.vcsbackend = vcsbackend
42
42
43 def commit_with_files(self, filenames):
43 def commit_with_files(self, filenames):
44 commits = [
44 commits = [
45 {'message': 'Adding all requested files',
45 {'message': 'Adding all requested files',
46 'added': [
46 'added': [
47 nodes.FileNode(filename, content='')
47 nodes.FileNode(filename, content='')
48 for filename in filenames
48 for filename in filenames
49 ]}]
49 ]}]
50 repo = self.vcsbackend.create_repo(commits=commits)
50 repo = self.vcsbackend.create_repo(commits=commits)
51 return repo.get_commit()
51 return repo.get_commit()
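
# Note: commit_with_files stores every file with empty content on purpose; the
# ReadmeFinder tests below only ever look at file paths, never at file bodies.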
52
52
53
53
54 def test_no_matching_file_returns_none(commit_util):
54 def test_no_matching_file_returns_none(commit_util):
55 commit = commit_util.commit_with_files(['LIESMICH'])
55 commit = commit_util.commit_with_files(['LIESMICH'])
56 finder = ReadmeFinder(default_renderer='rst')
56 finder = ReadmeFinder(default_renderer='rst')
57 filenode = finder.search(commit)
57 filenode = finder.search(commit)
58 assert filenode is None
58 assert filenode is None
59
59
60
60
61 def test_matching_file_returns_the_file_name(commit_util):
61 def test_matching_file_returns_the_file_name(commit_util):
62 commit = commit_util.commit_with_files(['README'])
62 commit = commit_util.commit_with_files(['README'])
63 finder = ReadmeFinder(default_renderer='rst')
63 finder = ReadmeFinder(default_renderer='rst')
64 filenode = finder.search(commit)
64 filenode = finder.search(commit)
65 assert filenode.path == 'README'
65 assert filenode.path == 'README'
66
66
67
67
68 def test_matching_file_with_extension(commit_util):
68 def test_matching_file_with_extension(commit_util):
69 commit = commit_util.commit_with_files(['README.rst'])
69 commit = commit_util.commit_with_files(['README.rst'])
70 finder = ReadmeFinder(default_renderer='rst')
70 finder = ReadmeFinder(default_renderer='rst')
71 filenode = finder.search(commit)
71 filenode = finder.search(commit)
72 assert filenode.path == 'README.rst'
72 assert filenode.path == 'README.rst'
73
73
74
74
75 def test_prefers_readme_without_extension(commit_util):
75 def test_prefers_readme_without_extension(commit_util):
76 commit = commit_util.commit_with_files(['README.rst', 'Readme'])
76 commit = commit_util.commit_with_files(['README.rst', 'Readme'])
77 finder = ReadmeFinder()
77 finder = ReadmeFinder()
78 filenode = finder.search(commit)
78 filenode = finder.search(commit)
79 assert filenode.path == 'Readme'
79 assert filenode.path == 'Readme'
80
80
81
81
82 @pytest.mark.parametrize('renderer, expected', [
82 @pytest.mark.parametrize('renderer, expected', [
83 ('rst', 'readme.rst'),
83 ('rst', 'readme.rst'),
84 ('markdown', 'readme.md'),
84 ('markdown', 'readme.md'),
85 ])
85 ])
86 def test_prefers_renderer_extensions(commit_util, renderer, expected):
86 def test_prefers_renderer_extensions(commit_util, renderer, expected):
87 commit = commit_util.commit_with_files(
87 commit = commit_util.commit_with_files(
88 ['readme.rst', 'readme.md', 'readme.txt'])
88 ['readme.rst', 'readme.md', 'readme.txt'])
89 finder = ReadmeFinder(default_renderer=renderer)
89 finder = ReadmeFinder(default_renderer=renderer)
90 filenode = finder.search(commit)
90 filenode = finder.search(commit)
91 assert filenode.path == expected
91 assert filenode.path == expected
92
92
93
93
94 def test_finds_readme_in_subdirectory(commit_util):
94 def test_finds_readme_in_subdirectory(commit_util):
95 commit = commit_util.commit_with_files(['doc/README.rst', 'LIESMICH'])
95 commit = commit_util.commit_with_files(['doc/README.rst', 'LIESMICH'])
96 finder = ReadmeFinder()
96 finder = ReadmeFinder()
97 filenode = finder.search(commit)
97 filenode = finder.search(commit)
98 assert filenode.path == 'doc/README.rst'
98 assert filenode.path == 'doc/README.rst'
99
99
100
100
101 def test_prefers_subdirectory_with_priority(commit_util):
101 def test_prefers_subdirectory_with_priority(commit_util):
102 commit = commit_util.commit_with_files(
102 commit = commit_util.commit_with_files(
103 ['Doc/Readme.rst', 'Docs/Readme.rst'])
103 ['Doc/Readme.rst', 'Docs/Readme.rst'])
104 finder = ReadmeFinder()
104 finder = ReadmeFinder()
105 filenode = finder.search(commit)
105 filenode = finder.search(commit)
106 assert filenode.path == 'Doc/Readme.rst'
106 assert filenode.path == 'Doc/Readme.rst'
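
# Read together, the tests above pin down ReadmeFinder's apparent resolution
# order: an extensionless readme beats one with an extension, an extension
# matching the default renderer beats other extensions, and a readme in a
# higher-priority subdirectory (e.g. "Doc/" over "Docs/") wins when several
# directories contain one.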
@@ -1,328 +1,328 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 import mock
22 import mock
23
23
24 from rhodecode.lib.utils2 import safe_unicode
24 from rhodecode.lib.utils2 import safe_unicode
25 from rhodecode.model.db import (
25 from rhodecode.model.db import (
26 true, User, UserGroup, UserGroupMember, UserEmailMap, Permission, UserIpMap)
26 true, User, UserGroup, UserGroupMember, UserEmailMap, Permission, UserIpMap)
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28 from rhodecode.model.user import UserModel
28 from rhodecode.model.user import UserModel
29 from rhodecode.model.user_group import UserGroupModel
29 from rhodecode.model.user_group import UserGroupModel
30 from rhodecode.model.repo import RepoModel
30 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.repo_group import RepoGroupModel
31 from rhodecode.model.repo_group import RepoGroupModel
32 from rhodecode.tests.fixture import Fixture
32 from rhodecode.tests.fixture import Fixture
33
33
34 fixture = Fixture()
34 fixture = Fixture()
35
35
36
36
37 class TestGetUsers(object):
37 class TestGetUsers(object):
38 def test_returns_active_users(self, backend, user_util):
38 def test_returns_active_users(self, backend, user_util):
39 for i in range(4):
39 for i in range(4):
40 is_active = i % 2 == 0
40 is_active = i % 2 == 0
41 user_util.create_user(active=is_active, lastname='Fake user')
41 user_util.create_user(active=is_active, lastname='Fake user')
42
42
43 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
43 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
44 with mock.patch('rhodecode.lib.helpers.link_to_user'):
44 with mock.patch('rhodecode.lib.helpers.link_to_user'):
45 users = UserModel().get_users()
45 users = UserModel().get_users()
46 fake_users = [u for u in users if u['last_name'] == 'Fake user']
46 fake_users = [u for u in users if u['last_name'] == 'Fake user']
47 assert len(fake_users) == 2
47 assert len(fake_users) == 2
48
48
49 expected_keys = (
49 expected_keys = (
50 'id', 'first_name', 'last_name', 'username', 'icon_link',
50 'id', 'first_name', 'last_name', 'username', 'icon_link',
51 'value_display', 'value', 'value_type')
51 'value_display', 'value', 'value_type')
52 for user in users:
52 for user in users:
53 assert user['value_type'] == 'user'
53 assert user['value_type'] == 'user'
54 for key in expected_keys:
54 for key in expected_keys:
55 assert key in user
55 assert key in user
56
56
57 def test_returns_user_filtered_by_last_name(self, backend, user_util):
57 def test_returns_user_filtered_by_last_name(self, backend, user_util):
58 keywords = ('aBc', u'ünicode')
58 keywords = ('aBc', u'ünicode')
59 for keyword in keywords:
59 for keyword in keywords:
60 for i in range(2):
60 for i in range(2):
61 user_util.create_user(
61 user_util.create_user(
62 active=True, lastname=u'Fake {} user'.format(keyword))
62 active=True, lastname=u'Fake {} user'.format(keyword))
63
63
64 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
64 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
65 with mock.patch('rhodecode.lib.helpers.link_to_user'):
65 with mock.patch('rhodecode.lib.helpers.link_to_user'):
66 keyword = keywords[1].lower()
66 keyword = keywords[1].lower()
67 users = UserModel().get_users(name_contains=keyword)
67 users = UserModel().get_users(name_contains=keyword)
68
68
69 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
69 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
70 assert len(fake_users) == 2
70 assert len(fake_users) == 2
71 for user in fake_users:
71 for user in fake_users:
72 assert user['last_name'] == safe_unicode('Fake ünicode user')
72 assert user['last_name'] == safe_unicode('Fake ünicode user')
73
73
74 def test_returns_user_filtered_by_first_name(self, backend, user_util):
74 def test_returns_user_filtered_by_first_name(self, backend, user_util):
75 created_users = []
75 created_users = []
76 keywords = ('aBc', u'ünicode')
76 keywords = ('aBc', u'ünicode')
77 for keyword in keywords:
77 for keyword in keywords:
78 for i in range(2):
78 for i in range(2):
79 created_users.append(user_util.create_user(
79 created_users.append(user_util.create_user(
80 active=True, lastname='Fake user',
80 active=True, lastname='Fake user',
81 firstname=u'Fake {} user'.format(keyword)))
81 firstname=u'Fake {} user'.format(keyword)))
82
82
83 keyword = keywords[1].lower()
83 keyword = keywords[1].lower()
84 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
84 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
85 with mock.patch('rhodecode.lib.helpers.link_to_user'):
85 with mock.patch('rhodecode.lib.helpers.link_to_user'):
86 users = UserModel().get_users(name_contains=keyword)
86 users = UserModel().get_users(name_contains=keyword)
87
87
88 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
88 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
89 assert len(fake_users) == 2
89 assert len(fake_users) == 2
90 for user in fake_users:
90 for user in fake_users:
91 assert user['first_name'] == safe_unicode('Fake ünicode user')
91 assert user['first_name'] == safe_unicode('Fake ünicode user')
92
92
93 def test_returns_user_filtered_by_username(self, backend, user_util):
93 def test_returns_user_filtered_by_username(self, backend, user_util):
94 created_users = []
94 created_users = []
95 for i in range(5):
95 for i in range(5):
96 created_users.append(user_util.create_user(
96 created_users.append(user_util.create_user(
97 active=True, lastname='Fake user'))
97 active=True, lastname='Fake user'))
98
98
99 user_filter = created_users[-1].username[-2:]
99 user_filter = created_users[-1].username[-2:]
100 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
100 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
101 with mock.patch('rhodecode.lib.helpers.link_to_user'):
101 with mock.patch('rhodecode.lib.helpers.link_to_user'):
102 users = UserModel().get_users(name_contains=user_filter)
102 users = UserModel().get_users(name_contains=user_filter)
103
103
104 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
104 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
105 assert len(fake_users) == 1
105 assert len(fake_users) == 1
106 assert fake_users[0]['username'] == created_users[-1].username
106 assert fake_users[0]['username'] == created_users[-1].username
107
107
108 def test_returns_limited_user_list(self, backend, user_util):
108 def test_returns_limited_user_list(self, backend, user_util):
109 created_users = []
109 created_users = []
110 for i in range(5):
110 for i in range(5):
111 created_users.append(user_util.create_user(
111 created_users.append(user_util.create_user(
112 active=True, lastname='Fake user'))
112 active=True, lastname='Fake user'))
113
113
114 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
114 with mock.patch('rhodecode.lib.helpers.gravatar_url'):
115 with mock.patch('rhodecode.lib.helpers.link_to_user'):
115 with mock.patch('rhodecode.lib.helpers.link_to_user'):
116 users = UserModel().get_users(name_contains='Fake', limit=3)
116 users = UserModel().get_users(name_contains='Fake', limit=3)
117
117
118 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
118 fake_users = [u for u in users if u['last_name'].startswith('Fake')]
119 assert len(fake_users) == 3
119 assert len(fake_users) == 3
120
120
121
121
122 @pytest.fixture
122 @pytest.fixture()
123 def test_user(request, baseapp):
123 def test_user(request, baseapp):
124 usr = UserModel().create_or_update(
124 usr = UserModel().create_or_update(
125 username=u'test_user',
125 username=u'test_user',
126 password=u'qweqwe',
126 password=u'qweqwe',
127 email=u'main_email@rhodecode.org',
127 email=u'main_email@rhodecode.org',
128 firstname=u'u1', lastname=u'u1')
128 firstname=u'u1', lastname=u'u1')
129 Session().commit()
129 Session().commit()
130 assert User.get_by_username(u'test_user') == usr
130 assert User.get_by_username(u'test_user') == usr
131
131
132 @request.addfinalizer
132 @request.addfinalizer
133 def cleanup():
133 def cleanup():
134 if UserModel().get_user(usr.user_id) is None:
134 if UserModel().get_user(usr.user_id) is None:
135 return
135 return
136
136
137 perm = Permission.query().all()
137 perm = Permission.query().all()
138 for p in perm:
138 for p in perm:
139 UserModel().revoke_perm(usr, p)
139 UserModel().revoke_perm(usr, p)
140
140
141 UserModel().delete(usr.user_id)
141 UserModel().delete(usr.user_id)
142 Session().commit()
142 Session().commit()
143
143
144 return usr
144 return usr
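
# Note: the finalizer above revokes every permission before deleting the user,
# which lets tests below (e.g. test_add_perm) grant permissions without having
# to revoke them afterwards.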
145
145
146
146
147 def test_create_and_remove(test_user):
147 def test_create_and_remove(test_user):
148 usr = test_user
148 usr = test_user
149
149
150 # make user group
150 # make user group
151 user_group = fixture.create_user_group('some_example_group')
151 user_group = fixture.create_user_group('some_example_group')
152 Session().commit()
152 Session().commit()
153
153
154 UserGroupModel().add_user_to_group(user_group, usr)
154 UserGroupModel().add_user_to_group(user_group, usr)
155 Session().commit()
155 Session().commit()
156
156
157 assert UserGroup.get(user_group.users_group_id) == user_group
157 assert UserGroup.get(user_group.users_group_id) == user_group
158 assert UserGroupMember.query().count() == 1
158 assert UserGroupMember.query().count() == 1
159 UserModel().delete(usr.user_id)
159 UserModel().delete(usr.user_id)
160 Session().commit()
160 Session().commit()
161
161
162 assert UserGroupMember.query().all() == []
162 assert UserGroupMember.query().all() == []
163
163
164
164
165 def test_additional_email_as_main(test_user):
165 def test_additional_email_as_main(test_user):
166 with pytest.raises(AttributeError):
166 with pytest.raises(AttributeError):
167 m = UserEmailMap()
167 m = UserEmailMap()
168 m.email = test_user.email
168 m.email = test_user.email
169 m.user = test_user
169 m.user = test_user
170 Session().add(m)
170 Session().add(m)
171 Session().commit()
171 Session().commit()
172
172
173
173
174 def test_extra_email_map(test_user):
174 def test_extra_email_map(test_user):
175
175
176 m = UserEmailMap()
176 m = UserEmailMap()
177 m.email = u'main_email2@rhodecode.org'
177 m.email = u'main_email2@rhodecode.org'
178 m.user = test_user
178 m.user = test_user
179 Session().add(m)
179 Session().add(m)
180 Session().commit()
180 Session().commit()
181
181
182 u = User.get_by_email(email='main_email@rhodecode.org')
182 u = User.get_by_email(email='main_email@rhodecode.org')
183 assert test_user.user_id == u.user_id
183 assert test_user.user_id == u.user_id
184 assert test_user.username == u.username
184 assert test_user.username == u.username
185
185
186 u = User.get_by_email(email='main_email2@rhodecode.org')
186 u = User.get_by_email(email='main_email2@rhodecode.org')
187 assert test_user.user_id == u.user_id
187 assert test_user.user_id == u.user_id
188 assert test_user.username == u.username
188 assert test_user.username == u.username
189 u = User.get_by_email(email='main_email3@rhodecode.org')
189 u = User.get_by_email(email='main_email3@rhodecode.org')
190 assert u is None
190 assert u is None
191
191
192
192
193 def test_get_api_data_replaces_secret_data_by_default(test_user):
193 def test_get_api_data_replaces_secret_data_by_default(test_user):
194 api_data = test_user.get_api_data()
194 api_data = test_user.get_api_data()
195 api_key_length = 40
195 api_key_length = 40
196 expected_replacement = '*' * api_key_length
196 expected_replacement = '*' * api_key_length
197
197
198 for key in api_data['auth_tokens']:
198 for key in api_data['auth_tokens']:
199 assert key == expected_replacement
199 assert key == expected_replacement
200
200
201
201
202 def test_get_api_data_includes_secret_data_if_activated(test_user):
202 def test_get_api_data_includes_secret_data_if_activated(test_user):
203 api_data = test_user.get_api_data(include_secrets=True)
203 api_data = test_user.get_api_data(include_secrets=True)
204 assert api_data['auth_tokens'] == test_user.auth_tokens
204 assert api_data['auth_tokens'] == test_user.auth_tokens
205
205
206
206
207 def test_add_perm(test_user):
207 def test_add_perm(test_user):
208 perm = Permission.query().all()[0]
208 perm = Permission.query().all()[0]
209 UserModel().grant_perm(test_user, perm)
209 UserModel().grant_perm(test_user, perm)
210 Session().commit()
210 Session().commit()
211 assert UserModel().has_perm(test_user, perm)
211 assert UserModel().has_perm(test_user, perm)
212
212
213
213
214 def test_has_perm(test_user):
214 def test_has_perm(test_user):
215 perm = Permission.query().all()
215 perm = Permission.query().all()
216 for p in perm:
216 for p in perm:
217 assert not UserModel().has_perm(test_user, p)
217 assert not UserModel().has_perm(test_user, p)
218
218
219
219
220 def test_revoke_perm(test_user):
220 def test_revoke_perm(test_user):
221 perm = Permission.query().all()[0]
221 perm = Permission.query().all()[0]
222 UserModel().grant_perm(test_user, perm)
222 UserModel().grant_perm(test_user, perm)
223 Session().commit()
223 Session().commit()
224 assert UserModel().has_perm(test_user, perm)
224 assert UserModel().has_perm(test_user, perm)
225
225
226 # revoke
226 # revoke
227 UserModel().revoke_perm(test_user, perm)
227 UserModel().revoke_perm(test_user, perm)
228 Session().commit()
228 Session().commit()
229 assert not UserModel().has_perm(test_user, perm)
229 assert not UserModel().has_perm(test_user, perm)
230
230
231
231
232 @pytest.mark.parametrize("ip_range, expected, expect_errors", [
232 @pytest.mark.parametrize("ip_range, expected, expect_errors", [
233 ('', [], False),
233 ('', [], False),
234 ('127.0.0.1', ['127.0.0.1'], False),
234 ('127.0.0.1', ['127.0.0.1'], False),
235 ('127.0.0.1,127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
235 ('127.0.0.1,127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
236 ('127.0.0.1 , 127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
236 ('127.0.0.1 , 127.0.0.2', ['127.0.0.1', '127.0.0.2'], False),
237 (
237 (
238 '127.0.0.1,172.172.172.0,127.0.0.2',
238 '127.0.0.1,172.172.172.0,127.0.0.2',
239 ['127.0.0.1', '172.172.172.0', '127.0.0.2'], False),
239 ['127.0.0.1', '172.172.172.0', '127.0.0.2'], False),
240 (
240 (
241 '127.0.0.1-127.0.0.5',
241 '127.0.0.1-127.0.0.5',
242 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
242 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
243 False),
243 False),
244 (
244 (
245 '127.0.0.1 - 127.0.0.5',
245 '127.0.0.1 - 127.0.0.5',
246 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
246 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5'],
247 False
247 False
248 ),
248 ),
249 ('-', [], True),
249 ('-', [], True),
250 ('127.0.0.1-32', [], True),
250 ('127.0.0.1-32', [], True),
251 (
251 (
252 '127.0.0.1,127.0.0.1,127.0.0.1,127.0.0.1-127.0.0.2,127.0.0.2',
252 '127.0.0.1,127.0.0.1,127.0.0.1,127.0.0.1-127.0.0.2,127.0.0.2',
253 ['127.0.0.1', '127.0.0.2'], False),
253 ['127.0.0.1', '127.0.0.2'], False),
254 (
254 (
255 '127.0.0.1-127.0.0.2,127.0.0.4-127.0.0.6,',
255 '127.0.0.1-127.0.0.2,127.0.0.4-127.0.0.6,',
256 ['127.0.0.1', '127.0.0.2', '127.0.0.4', '127.0.0.5', '127.0.0.6'],
256 ['127.0.0.1', '127.0.0.2', '127.0.0.4', '127.0.0.5', '127.0.0.6'],
257 False
257 False
258 ),
258 ),
259 (
259 (
260 '127.0.0.1-127.0.0.2,127.0.0.1-127.0.0.6,',
260 '127.0.0.1-127.0.0.2,127.0.0.1-127.0.0.6,',
261 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5',
261 ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4', '127.0.0.5',
262 '127.0.0.6'],
262 '127.0.0.6'],
263 False
263 False
264 ),
264 ),
265 ])
265 ])
266 def test_ip_range_generator(ip_range, expected, expect_errors):
266 def test_ip_range_generator(ip_range, expected, expect_errors):
267 func = UserModel().parse_ip_range
267 func = UserModel().parse_ip_range
268 if expect_errors:
268 if expect_errors:
269 pytest.raises(ValueError, func, ip_range)
269 pytest.raises(ValueError, func, ip_range)
270 else:
270 else:
271 parsed_list = func(ip_range)
271 parsed_list = func(ip_range)
272 assert parsed_list == expected
272 assert parsed_list == expected
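
# The cases above document parse_ip_range's behaviour: comma-separated entries
# and dash ranges (whitespace and trailing commas tolerated) are expanded into
# individual addresses, duplicates and overlapping ranges are collapsed, and
# malformed input such as '-' or '127.0.0.1-32' raises ValueError.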
273
273
274
274
275 def test_user_delete_cascades_ip_whitelist(test_user):
275 def test_user_delete_cascades_ip_whitelist(test_user):
276 sample_ip = '1.1.1.1'
276 sample_ip = '1.1.1.1'
277 uid_map = UserIpMap(user_id=test_user.user_id, ip_addr=sample_ip)
277 uid_map = UserIpMap(user_id=test_user.user_id, ip_addr=sample_ip)
278 Session().add(uid_map)
278 Session().add(uid_map)
279 Session().delete(test_user)
279 Session().delete(test_user)
280 try:
280 try:
281 Session().flush()
281 Session().flush()
282 finally:
282 finally:
283 Session().rollback()
283 Session().rollback()
284
284
285
285
286 def test_account_for_deactivation_generation(test_user):
286 def test_account_for_deactivation_generation(test_user):
287 accounts = UserModel().get_accounts_in_creation_order(
287 accounts = UserModel().get_accounts_in_creation_order(
288 current_user=test_user)
288 current_user=test_user)
289 # current user should be #1 in the list
289 # current user should be #1 in the list
290 assert accounts[0] == test_user.user_id
290 assert accounts[0] == test_user.user_id
291 active_users = User.query().filter(User.active == true()).count()
291 active_users = User.query().filter(User.active == true()).count()
292 assert active_users == len(accounts)
292 assert active_users == len(accounts)
293
293
294
294
295 def test_user_delete_cascades_permissions_on_repo(backend, test_user):
295 def test_user_delete_cascades_permissions_on_repo(backend, test_user):
296 test_repo = backend.create_repo()
296 test_repo = backend.create_repo()
297 RepoModel().grant_user_permission(
297 RepoModel().grant_user_permission(
298 test_repo, test_user, 'repository.write')
298 test_repo, test_user, 'repository.write')
299 Session().commit()
299 Session().commit()
300
300
301 assert test_user.repo_to_perm
301 assert test_user.repo_to_perm
302
302
303 UserModel().delete(test_user)
303 UserModel().delete(test_user)
304 Session().commit()
304 Session().commit()
305
305
306
306
307 def test_user_delete_cascades_permissions_on_repo_group(
307 def test_user_delete_cascades_permissions_on_repo_group(
308 test_repo_group, test_user):
308 test_repo_group, test_user):
309 RepoGroupModel().grant_user_permission(
309 RepoGroupModel().grant_user_permission(
310 test_repo_group, test_user, 'group.write')
310 test_repo_group, test_user, 'group.write')
311 Session().commit()
311 Session().commit()
312
312
313 assert test_user.repo_group_to_perm
313 assert test_user.repo_group_to_perm
314
314
315 Session().delete(test_user)
315 Session().delete(test_user)
316 Session().commit()
316 Session().commit()
317
317
318
318
319 def test_user_delete_cascades_permissions_on_user_group(
319 def test_user_delete_cascades_permissions_on_user_group(
320 test_user_group, test_user):
320 test_user_group, test_user):
321 UserGroupModel().grant_user_permission(
321 UserGroupModel().grant_user_permission(
322 test_user_group, test_user, 'usergroup.write')
322 test_user_group, test_user, 'usergroup.write')
323 Session().commit()
323 Session().commit()
324
324
325 assert test_user.user_group_to_perm
325 assert test_user.user_group_to_perm
326
326
327 Session().delete(test_user)
327 Session().delete(test_user)
328 Session().commit()
328 Session().commit()
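
# The three cascade tests above follow the same recipe: grant a permission on
# a repo, repo group or user group, assert the matching *_to_perm relationship
# is populated, then delete the user and commit; success simply means the
# delete does not raise.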
@@ -1,296 +1,296 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import formencode
22 import formencode
23 import pytest
23 import pytest
24
24
25 from rhodecode.tests import (
25 from rhodecode.tests import (
26 HG_REPO, TEST_USER_REGULAR2_EMAIL, TEST_USER_REGULAR2_LOGIN,
26 HG_REPO, TEST_USER_REGULAR2_EMAIL, TEST_USER_REGULAR2_LOGIN,
27 TEST_USER_REGULAR2_PASS, TEST_USER_ADMIN_LOGIN, TESTS_TMP_PATH)
27 TEST_USER_REGULAR2_PASS, TEST_USER_ADMIN_LOGIN, TESTS_TMP_PATH)
28
28
29 from rhodecode.model import validators as v
29 from rhodecode.model import validators as v
30 from rhodecode.model.user_group import UserGroupModel
30 from rhodecode.model.user_group import UserGroupModel
31
31
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.model.repo_group import RepoGroupModel
33 from rhodecode.model.repo_group import RepoGroupModel
34 from rhodecode.model.db import ChangesetStatus, Repository
34 from rhodecode.model.db import ChangesetStatus, Repository
35 from rhodecode.model.changeset_status import ChangesetStatusModel
35 from rhodecode.model.changeset_status import ChangesetStatusModel
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
37
37
38 fixture = Fixture()
38 fixture = Fixture()
39
39
40 pytestmark = pytest.mark.usefixtures('baseapp')
40 pytestmark = pytest.mark.usefixtures('baseapp')
41
41
42
42
43 @pytest.fixture
43 @pytest.fixture()
44 def localizer():
44 def localizer():
45 def func(msg):
45 def func(msg):
46 return msg
46 return msg
47 return func
47 return func
48
48
49
49
50 def test_Message_extractor(localizer):
50 def test_Message_extractor(localizer):
51 validator = v.ValidUsername(localizer)
51 validator = v.ValidUsername(localizer)
52 pytest.raises(formencode.Invalid, validator.to_python, 'default')
52 pytest.raises(formencode.Invalid, validator.to_python, 'default')
53
53
54 class StateObj(object):
54 class StateObj(object):
55 pass
55 pass
56
56
57 pytest.raises(
57 pytest.raises(
58 formencode.Invalid, validator.to_python, 'default', StateObj)
58 formencode.Invalid, validator.to_python, 'default', StateObj)
59
59
60
60
61 def test_ValidUsername(localizer):
61 def test_ValidUsername(localizer):
62 validator = v.ValidUsername(localizer)
62 validator = v.ValidUsername(localizer)
63
63
64 pytest.raises(formencode.Invalid, validator.to_python, 'default')
64 pytest.raises(formencode.Invalid, validator.to_python, 'default')
65 pytest.raises(formencode.Invalid, validator.to_python, 'new_user')
65 pytest.raises(formencode.Invalid, validator.to_python, 'new_user')
66 pytest.raises(formencode.Invalid, validator.to_python, '.,')
66 pytest.raises(formencode.Invalid, validator.to_python, '.,')
67 pytest.raises(
67 pytest.raises(
68 formencode.Invalid, validator.to_python, TEST_USER_ADMIN_LOGIN)
68 formencode.Invalid, validator.to_python, TEST_USER_ADMIN_LOGIN)
69 assert 'test' == validator.to_python('test')
69 assert 'test' == validator.to_python('test')
70
70
71 validator = v.ValidUsername(localizer, edit=True, old_data={'user_id': 1})
71 validator = v.ValidUsername(localizer, edit=True, old_data={'user_id': 1})
72
72
73
73
74 def test_ValidRepoUser(localizer):
74 def test_ValidRepoUser(localizer):
75 validator = v.ValidRepoUser(localizer)
75 validator = v.ValidRepoUser(localizer)
76 pytest.raises(formencode.Invalid, validator.to_python, 'nouser')
76 pytest.raises(formencode.Invalid, validator.to_python, 'nouser')
77 assert TEST_USER_ADMIN_LOGIN == \
77 assert TEST_USER_ADMIN_LOGIN == \
78 validator.to_python(TEST_USER_ADMIN_LOGIN)
78 validator.to_python(TEST_USER_ADMIN_LOGIN)
79
79
80
80
81 def test_ValidUserGroup(localizer):
81 def test_ValidUserGroup(localizer):
82 validator = v.ValidUserGroup(localizer)
82 validator = v.ValidUserGroup(localizer)
83 pytest.raises(formencode.Invalid, validator.to_python, 'default')
83 pytest.raises(formencode.Invalid, validator.to_python, 'default')
84 pytest.raises(formencode.Invalid, validator.to_python, '.,')
84 pytest.raises(formencode.Invalid, validator.to_python, '.,')
85
85
86 gr = fixture.create_user_group('test')
86 gr = fixture.create_user_group('test')
87 gr2 = fixture.create_user_group('tes2')
87 gr2 = fixture.create_user_group('tes2')
88 Session().commit()
88 Session().commit()
89 pytest.raises(formencode.Invalid, validator.to_python, 'test')
89 pytest.raises(formencode.Invalid, validator.to_python, 'test')
90 assert gr.users_group_id is not None
90 assert gr.users_group_id is not None
91 validator = v.ValidUserGroup(localizer,
91 validator = v.ValidUserGroup(localizer,
92 edit=True,
92 edit=True,
93 old_data={'users_group_id': gr2.users_group_id})
93 old_data={'users_group_id': gr2.users_group_id})
94
94
95 pytest.raises(formencode.Invalid, validator.to_python, 'test')
95 pytest.raises(formencode.Invalid, validator.to_python, 'test')
96 pytest.raises(formencode.Invalid, validator.to_python, 'TesT')
96 pytest.raises(formencode.Invalid, validator.to_python, 'TesT')
97 pytest.raises(formencode.Invalid, validator.to_python, 'TEST')
97 pytest.raises(formencode.Invalid, validator.to_python, 'TEST')
98 UserGroupModel().delete(gr)
98 UserGroupModel().delete(gr)
99 UserGroupModel().delete(gr2)
99 UserGroupModel().delete(gr2)
100 Session().commit()
100 Session().commit()
101
101
102
102
103 @pytest.fixture(scope='function')
103 @pytest.fixture(scope='function')
104 def repo_group(request):
104 def repo_group(request):
105 model = RepoGroupModel()
105 model = RepoGroupModel()
106 gr = model.create(
106 gr = model.create(
107 group_name='test_gr', group_description='desc', just_db=True,
107 group_name='test_gr', group_description='desc', just_db=True,
108 owner=TEST_USER_ADMIN_LOGIN)
108 owner=TEST_USER_ADMIN_LOGIN)
109
109
110 def cleanup():
110 def cleanup():
111 model.delete(gr)
111 model.delete(gr)
112
112
113 request.addfinalizer(cleanup)
113 request.addfinalizer(cleanup)
114
114
115 return gr
115 return gr
116
116
117
117
118 def test_ValidRepoGroup_same_name_as_repo(localizer):
118 def test_ValidRepoGroup_same_name_as_repo(localizer):
119 validator = v.ValidRepoGroup(localizer)
119 validator = v.ValidRepoGroup(localizer)
120 with pytest.raises(formencode.Invalid) as excinfo:
120 with pytest.raises(formencode.Invalid) as excinfo:
121 validator.to_python({'group_name': HG_REPO})
121 validator.to_python({'group_name': HG_REPO})
122 expected_msg = 'Repository with name "vcs_test_hg" already exists'
122 expected_msg = 'Repository with name "vcs_test_hg" already exists'
123 assert expected_msg in str(excinfo.value)
123 assert expected_msg in str(excinfo.value)
124
124
125
125
126 def test_ValidRepoGroup_group_exists(localizer, repo_group):
126 def test_ValidRepoGroup_group_exists(localizer, repo_group):
127 validator = v.ValidRepoGroup(localizer)
127 validator = v.ValidRepoGroup(localizer)
128 with pytest.raises(formencode.Invalid) as excinfo:
128 with pytest.raises(formencode.Invalid) as excinfo:
129 validator.to_python({'group_name': repo_group.group_name})
129 validator.to_python({'group_name': repo_group.group_name})
130 expected_msg = 'Group "test_gr" already exists'
130 expected_msg = 'Group "test_gr" already exists'
131 assert expected_msg in str(excinfo.value)
131 assert expected_msg in str(excinfo.value)
132
132
133
133
134 def test_ValidRepoGroup_invalid_parent(localizer, repo_group):
134 def test_ValidRepoGroup_invalid_parent(localizer, repo_group):
135 validator = v.ValidRepoGroup(localizer, edit=True,
135 validator = v.ValidRepoGroup(localizer, edit=True,
136 old_data={'group_id': repo_group.group_id})
136 old_data={'group_id': repo_group.group_id})
137 with pytest.raises(formencode.Invalid) as excinfo:
137 with pytest.raises(formencode.Invalid) as excinfo:
138 validator.to_python({
138 validator.to_python({
139 'group_name': repo_group.group_name + 'n',
139 'group_name': repo_group.group_name + 'n',
140 'group_parent_id': repo_group.group_id,
140 'group_parent_id': repo_group.group_id,
141 })
141 })
142 expected_msg = 'Cannot assign this group as parent'
142 expected_msg = 'Cannot assign this group as parent'
143 assert expected_msg in str(excinfo.value)
143 assert expected_msg in str(excinfo.value)
144
144
145
145
146 def test_ValidRepoGroup_edit_group_no_root_permission(localizer, repo_group):
146 def test_ValidRepoGroup_edit_group_no_root_permission(localizer, repo_group):
147 validator = v.ValidRepoGroup(localizer,
147 validator = v.ValidRepoGroup(localizer,
148 edit=True, old_data={'group_id': repo_group.group_id},
148 edit=True, old_data={'group_id': repo_group.group_id},
149 can_create_in_root=False)
149 can_create_in_root=False)
150
150
151 # Cannot change parent
151 # Cannot change parent
152 with pytest.raises(formencode.Invalid) as excinfo:
152 with pytest.raises(formencode.Invalid) as excinfo:
153 validator.to_python({'group_parent_id': '25'})
153 validator.to_python({'group_parent_id': '25'})
154 expected_msg = 'no permission to store repository group in root location'
154 expected_msg = 'no permission to store repository group in root location'
155 assert expected_msg in str(excinfo.value)
155 assert expected_msg in str(excinfo.value)
156
156
157 # Changing all the other fields is allowed
157 # Changing all the other fields is allowed
158 validator.to_python({'group_name': 'foo', 'group_parent_id': '-1'})
158 validator.to_python({'group_name': 'foo', 'group_parent_id': '-1'})
159 validator.to_python(
159 validator.to_python(
160 {'user': TEST_USER_REGULAR2_LOGIN, 'group_parent_id': '-1'})
160 {'user': TEST_USER_REGULAR2_LOGIN, 'group_parent_id': '-1'})
161 validator.to_python({'group_description': 'bar', 'group_parent_id': '-1'})
161 validator.to_python({'group_description': 'bar', 'group_parent_id': '-1'})
162 validator.to_python({'enable_locking': 'true', 'group_parent_id': '-1'})
162 validator.to_python({'enable_locking': 'true', 'group_parent_id': '-1'})
163
163
164
164
165 def test_ValidPassword(localizer):
165 def test_ValidPassword(localizer):
166 validator = v.ValidPassword(localizer)
166 validator = v.ValidPassword(localizer)
167 assert 'lol' == validator.to_python('lol')
167 assert 'lol' == validator.to_python('lol')
168 assert None == validator.to_python(None)
168 assert None == validator.to_python(None)
169 pytest.raises(formencode.Invalid, validator.to_python, 'ąćżź')
169 pytest.raises(formencode.Invalid, validator.to_python, 'ąćżź')
170
170
171
171
172 def test_ValidPasswordsMatch(localizer):
172 def test_ValidPasswordsMatch(localizer):
173 validator = v.ValidPasswordsMatch(localizer)
173 validator = v.ValidPasswordsMatch(localizer)
174 pytest.raises(
174 pytest.raises(
175 formencode.Invalid,
175 formencode.Invalid,
176 validator.to_python, {'password': 'pass',
176 validator.to_python, {'password': 'pass',
177 'password_confirmation': 'pass2'})
177 'password_confirmation': 'pass2'})
178
178
179 pytest.raises(
179 pytest.raises(
180 formencode.Invalid,
180 formencode.Invalid,
181 validator.to_python, {'new_password': 'pass',
181 validator.to_python, {'new_password': 'pass',
182 'password_confirmation': 'pass2'})
182 'password_confirmation': 'pass2'})
183
183
184 assert {'new_password': 'pass', 'password_confirmation': 'pass'} == \
184 assert {'new_password': 'pass', 'password_confirmation': 'pass'} == \
185 validator.to_python({'new_password': 'pass',
185 validator.to_python({'new_password': 'pass',
186 'password_confirmation': 'pass'})
186 'password_confirmation': 'pass'})
187
187
188 assert {'password': 'pass', 'password_confirmation': 'pass'} == \
188 assert {'password': 'pass', 'password_confirmation': 'pass'} == \
189 validator.to_python({'password': 'pass',
189 validator.to_python({'password': 'pass',
190 'password_confirmation': 'pass'})
190 'password_confirmation': 'pass'})
191
191
192
192
193 def test_ValidAuth(localizer, config_stub):
193 def test_ValidAuth(localizer, config_stub):
194 config_stub.testing_securitypolicy()
194 config_stub.testing_securitypolicy()
195 config_stub.include('rhodecode.authentication')
195 config_stub.include('rhodecode.authentication')
196 config_stub.include('rhodecode.authentication.plugins.auth_rhodecode')
196 config_stub.include('rhodecode.authentication.plugins.auth_rhodecode')
197 config_stub.include('rhodecode.authentication.plugins.auth_token')
197 config_stub.include('rhodecode.authentication.plugins.auth_token')
198
198
199 validator = v.ValidAuth(localizer)
199 validator = v.ValidAuth(localizer)
200 valid_creds = {
200 valid_creds = {
201 'username': TEST_USER_REGULAR2_LOGIN,
201 'username': TEST_USER_REGULAR2_LOGIN,
202 'password': TEST_USER_REGULAR2_PASS,
202 'password': TEST_USER_REGULAR2_PASS,
203 }
203 }
204 invalid_creds = {
204 invalid_creds = {
205 'username': 'err',
205 'username': 'err',
206 'password': 'err',
206 'password': 'err',
207 }
207 }
208 assert valid_creds == validator.to_python(valid_creds)
208 assert valid_creds == validator.to_python(valid_creds)
209 pytest.raises(
209 pytest.raises(
210 formencode.Invalid, validator.to_python, invalid_creds)
210 formencode.Invalid, validator.to_python, invalid_creds)
211
211
212
212
213 def test_ValidRepoName(localizer):
213 def test_ValidRepoName(localizer):
214 validator = v.ValidRepoName(localizer)
214 validator = v.ValidRepoName(localizer)
215
215
216 pytest.raises(
216 pytest.raises(
217 formencode.Invalid, validator.to_python, {'repo_name': ''})
217 formencode.Invalid, validator.to_python, {'repo_name': ''})
218
218
219 pytest.raises(
219 pytest.raises(
220 formencode.Invalid, validator.to_python, {'repo_name': HG_REPO})
220 formencode.Invalid, validator.to_python, {'repo_name': HG_REPO})
221
221
222 gr = RepoGroupModel().create(group_name='group_test',
222 gr = RepoGroupModel().create(group_name='group_test',
223 group_description='desc',
223 group_description='desc',
224 owner=TEST_USER_ADMIN_LOGIN)
224 owner=TEST_USER_ADMIN_LOGIN)
225 pytest.raises(
225 pytest.raises(
226 formencode.Invalid, validator.to_python, {'repo_name': gr.group_name})
226 formencode.Invalid, validator.to_python, {'repo_name': gr.group_name})
227
227
228 # TODO: write an error case for that, i.e. create a repo within a group
228 # TODO: write an error case for that, i.e. create a repo within a group
229 # pytest.raises(formencode.Invalid,
229 # pytest.raises(formencode.Invalid,
230 # validator.to_python, {'repo_name': 'some',
230 # validator.to_python, {'repo_name': 'some',
231 # 'repo_group': gr.group_id})
231 # 'repo_group': gr.group_id})
232
232
233
233
234 def test_ValidForkName(localizer):
234 def test_ValidForkName(localizer):
235 # this uses ValidRepoName validator
235 # this uses ValidRepoName validator
236 assert True
236 assert True
237
237
238 @pytest.mark.parametrize("name, expected", [
238 @pytest.mark.parametrize("name, expected", [
239 ('test', 'test'), ('lolz!', 'lolz'), (' aavv', 'aavv'),
239 ('test', 'test'), ('lolz!', 'lolz'), (' aavv', 'aavv'),
240 ('ala ma kota', 'ala-ma-kota'), ('@nooo', 'nooo'),
240 ('ala ma kota', 'ala-ma-kota'), ('@nooo', 'nooo'),
241 ('$!haha lolz !', 'haha-lolz'), ('$$$$$', ''), ('{}OK!', 'OK'),
241 ('$!haha lolz !', 'haha-lolz'), ('$$$$$', ''), ('{}OK!', 'OK'),
242 ('/]re po', 're-po')])
242 ('/]re po', 're-po')])
243 def test_SlugifyName(name, expected, localizer):
243 def test_SlugifyName(name, expected, localizer):
244 validator = v.SlugifyName(localizer)
244 validator = v.SlugifyName(localizer)
245 assert expected == validator.to_python(name)
245 assert expected == validator.to_python(name)
246
246
247
247
248 def test_ValidForkType(localizer):
248 def test_ValidForkType(localizer):
249 validator = v.ValidForkType(localizer, old_data={'repo_type': 'hg'})
249 validator = v.ValidForkType(localizer, old_data={'repo_type': 'hg'})
250 assert 'hg' == validator.to_python('hg')
250 assert 'hg' == validator.to_python('hg')
251 pytest.raises(formencode.Invalid, validator.to_python, 'git')
251 pytest.raises(formencode.Invalid, validator.to_python, 'git')
252
252
253
253
254 def test_ValidPath(localizer):
254 def test_ValidPath(localizer):
255 validator = v.ValidPath(localizer)
255 validator = v.ValidPath(localizer)
256 assert TESTS_TMP_PATH == validator.to_python(TESTS_TMP_PATH)
256 assert TESTS_TMP_PATH == validator.to_python(TESTS_TMP_PATH)
257 pytest.raises(
257 pytest.raises(
258 formencode.Invalid, validator.to_python, '/no_such_dir')
258 formencode.Invalid, validator.to_python, '/no_such_dir')
259
259
260
260
261 def test_UniqSystemEmail(localizer):
261 def test_UniqSystemEmail(localizer):
262 validator = v.UniqSystemEmail(localizer, old_data={})
262 validator = v.UniqSystemEmail(localizer, old_data={})
263
263
264 assert 'mail@python.org' == validator.to_python('MaiL@Python.org')
264 assert 'mail@python.org' == validator.to_python('MaiL@Python.org')
265
265
266 email = TEST_USER_REGULAR2_EMAIL
266 email = TEST_USER_REGULAR2_EMAIL
267 pytest.raises(formencode.Invalid, validator.to_python, email)
267 pytest.raises(formencode.Invalid, validator.to_python, email)
268
268
269
269
270 def test_ValidSystemEmail(localizer):
270 def test_ValidSystemEmail(localizer):
271 validator = v.ValidSystemEmail(localizer)
271 validator = v.ValidSystemEmail(localizer)
272 email = TEST_USER_REGULAR2_EMAIL
272 email = TEST_USER_REGULAR2_EMAIL
273
273
274 assert email == validator.to_python(email)
274 assert email == validator.to_python(email)
275 pytest.raises(formencode.Invalid, validator.to_python, 'err')
275 pytest.raises(formencode.Invalid, validator.to_python, 'err')
276
276
277
277
278 def test_NotReviewedRevisions(localizer):
278 def test_NotReviewedRevisions(localizer):
279 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
279 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
280 validator = v.NotReviewedRevisions(localizer, repo_id)
280 validator = v.NotReviewedRevisions(localizer, repo_id)
281 rev = '0' * 40
281 rev = '0' * 40
282 # add status for a rev, that should throw an error because it is already
282 # add status for a rev, that should throw an error because it is already
283 # reviewed
283 # reviewed
284 new_status = ChangesetStatus()
284 new_status = ChangesetStatus()
285 new_status.author = ChangesetStatusModel()._get_user(TEST_USER_ADMIN_LOGIN)
285 new_status.author = ChangesetStatusModel()._get_user(TEST_USER_ADMIN_LOGIN)
286 new_status.repo = ChangesetStatusModel()._get_repo(HG_REPO)
286 new_status.repo = ChangesetStatusModel()._get_repo(HG_REPO)
287 new_status.status = ChangesetStatus.STATUS_APPROVED
287 new_status.status = ChangesetStatus.STATUS_APPROVED
288 new_status.comment = None
288 new_status.comment = None
289 new_status.revision = rev
289 new_status.revision = rev
290 Session().add(new_status)
290 Session().add(new_status)
291 Session().commit()
291 Session().commit()
292 try:
292 try:
293 pytest.raises(formencode.Invalid, validator.to_python, [rev])
293 pytest.raises(formencode.Invalid, validator.to_python, [rev])
294 finally:
294 finally:
295 Session().delete(new_status)
295 Session().delete(new_status)
296 Session().commit()
296 Session().commit()
@@ -1,1826 +1,1826 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.tests import (
62 from rhodecode.tests import (
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_REGULAR_PASS)
65 TEST_USER_REGULAR_PASS)
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.fixture import Fixture
67 from rhodecode.tests.fixture import Fixture
68 from rhodecode.config import utils as config_utils
68 from rhodecode.config import utils as config_utils
69
69
70 def _split_comma(value):
70 def _split_comma(value):
71 return value.split(',')
71 return value.split(',')
72
72
73
73
74 def pytest_addoption(parser):
74 def pytest_addoption(parser):
75 parser.addoption(
75 parser.addoption(
76 '--keep-tmp-path', action='store_true',
76 '--keep-tmp-path', action='store_true',
77 help="Keep the test temporary directories")
77 help="Keep the test temporary directories")
78 parser.addoption(
78 parser.addoption(
79 '--backends', action='store', type=_split_comma,
79 '--backends', action='store', type=_split_comma,
80 default=['git', 'hg', 'svn'],
80 default=['git', 'hg', 'svn'],
81 help="Select which backends to test for backend specific tests.")
81 help="Select which backends to test for backend specific tests.")
82 parser.addoption(
82 parser.addoption(
83 '--dbs', action='store', type=_split_comma,
83 '--dbs', action='store', type=_split_comma,
84 default=['sqlite'],
84 default=['sqlite'],
85 help="Select which database to test for database specific tests. "
85 help="Select which database to test for database specific tests. "
86 "Possible options are sqlite,postgres,mysql")
86 "Possible options are sqlite,postgres,mysql")
87 parser.addoption(
87 parser.addoption(
88 '--appenlight', '--ae', action='store_true',
88 '--appenlight', '--ae', action='store_true',
89 help="Track statistics in appenlight.")
89 help="Track statistics in appenlight.")
90 parser.addoption(
90 parser.addoption(
91 '--appenlight-api-key', '--ae-key',
91 '--appenlight-api-key', '--ae-key',
92 help="API key for Appenlight.")
92 help="API key for Appenlight.")
93 parser.addoption(
93 parser.addoption(
94 '--appenlight-url', '--ae-url',
94 '--appenlight-url', '--ae-url',
95 default="https://ae.rhodecode.com",
95 default="https://ae.rhodecode.com",
96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 parser.addoption(
97 parser.addoption(
98 '--sqlite-connection-string', action='store',
98 '--sqlite-connection-string', action='store',
99 default='', help="Connection string for the dbs tests with SQLite")
99 default='', help="Connection string for the dbs tests with SQLite")
100 parser.addoption(
100 parser.addoption(
101 '--postgres-connection-string', action='store',
101 '--postgres-connection-string', action='store',
102 default='', help="Connection string for the dbs tests with Postgres")
102 default='', help="Connection string for the dbs tests with Postgres")
103 parser.addoption(
103 parser.addoption(
104 '--mysql-connection-string', action='store',
104 '--mysql-connection-string', action='store',
105 default='', help="Connection string for the dbs tests with MySQL")
105 default='', help="Connection string for the dbs tests with MySQL")
106 parser.addoption(
106 parser.addoption(
107 '--repeat', type=int, default=100,
107 '--repeat', type=int, default=100,
108 help="Number of repetitions in performance tests.")
108 help="Number of repetitions in performance tests.")
109
109
110
110
111 def pytest_configure(config):
111 def pytest_configure(config):
112 from rhodecode.config import patches
112 from rhodecode.config import patches
113
113
114
114
115 def pytest_collection_modifyitems(session, config, items):
115 def pytest_collection_modifyitems(session, config, items):
116 # nottest marked, compare nose, used for transition from nose to pytest
116 # nottest marked, compare nose, used for transition from nose to pytest
117 remaining = [
117 remaining = [
118 i for i in items if getattr(i.obj, '__test__', True)]
118 i for i in items if getattr(i.obj, '__test__', True)]
119 items[:] = remaining
119 items[:] = remaining
120
120
121
121
122 def pytest_generate_tests(metafunc):
122 def pytest_generate_tests(metafunc):
123 # Support test generation based on --backend parameter
123 # Support test generation based on --backend parameter
124 if 'backend_alias' in metafunc.fixturenames:
124 if 'backend_alias' in metafunc.fixturenames:
125 backends = get_backends_from_metafunc(metafunc)
125 backends = get_backends_from_metafunc(metafunc)
126 scope = None
126 scope = None
127 if not backends:
127 if not backends:
128 pytest.skip("Not enabled for any of selected backends")
128 pytest.skip("Not enabled for any of selected backends")
129 metafunc.parametrize('backend_alias', backends, scope=scope)
129 metafunc.parametrize('backend_alias', backends, scope=scope)
130 elif hasattr(metafunc.function, 'backends'):
130 elif hasattr(metafunc.function, 'backends'):
131 backends = get_backends_from_metafunc(metafunc)
131 backends = get_backends_from_metafunc(metafunc)
132 if not backends:
132 if not backends:
133 pytest.skip("Not enabled for any of selected backends")
133 pytest.skip("Not enabled for any of selected backends")
134
134
135
135
136 def get_backends_from_metafunc(metafunc):
136 def get_backends_from_metafunc(metafunc):
137 requested_backends = set(metafunc.config.getoption('--backends'))
137 requested_backends = set(metafunc.config.getoption('--backends'))
138 if hasattr(metafunc.function, 'backends'):
138 if hasattr(metafunc.function, 'backends'):
139 # Supported backends by this test function, created from
139 # Supported backends by this test function, created from
140 # pytest.mark.backends
140 # pytest.mark.backends
141 backends = metafunc.definition.get_closest_marker('backends').args
141 backends = metafunc.definition.get_closest_marker('backends').args
142 elif hasattr(metafunc.cls, 'backend_alias'):
142 elif hasattr(metafunc.cls, 'backend_alias'):
143 # Support class attribute "backend_alias", this is mainly
143 # Support class attribute "backend_alias", this is mainly
144 # for legacy reasons for tests not yet using pytest.mark.backends
144 # for legacy reasons for tests not yet using pytest.mark.backends
145 backends = [metafunc.cls.backend_alias]
145 backends = [metafunc.cls.backend_alias]
146 else:
146 else:
147 backends = metafunc.config.getoption('--backends')
147 backends = metafunc.config.getoption('--backends')
148 return requested_backends.intersection(backends)
148 return requested_backends.intersection(backends)
149
149
150
150
151 @pytest.fixture(scope='session', autouse=True)
151 @pytest.fixture(scope='session', autouse=True)
152 def activate_example_rcextensions(request):
152 def activate_example_rcextensions(request):
153 """
153 """
154 Patch in an example rcextensions module which verifies passed in kwargs.
154 Patch in an example rcextensions module which verifies passed in kwargs.
155 """
155 """
156 from rhodecode.config import rcextensions
156 from rhodecode.config import rcextensions
157
157
158 old_extensions = rhodecode.EXTENSIONS
158 old_extensions = rhodecode.EXTENSIONS
159 rhodecode.EXTENSIONS = rcextensions
159 rhodecode.EXTENSIONS = rcextensions
160 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
160 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161
161
162 @request.addfinalizer
162 @request.addfinalizer
163 def cleanup():
163 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
164 rhodecode.EXTENSIONS = old_extensions
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture()
168 def capture_rcextensions():
168 def capture_rcextensions():
169 """
169 """
170 Returns the recorded calls to entry points in rcextensions.
170 Returns the recorded calls to entry points in rcextensions.
171 """
171 """
172 calls = rhodecode.EXTENSIONS.calls
172 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
173 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
174 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
175 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
176 # is enough to make it work.
177 return calls
177 return calls
178
178
179
179
180 @pytest.fixture(scope='session')
180 @pytest.fixture(scope='session')
181 def http_environ_session():
181 def http_environ_session():
182 """
182 """
183 Allows using "http_environ" in session scope.
183 Allows using "http_environ" in session scope.
184 """
184 """
185 return plain_http_environ()
185 return plain_http_environ()
186
186
187
187
188 def plain_http_host_stub():
188 def plain_http_host_stub():
189 """
189 """
190 Value of HTTP_HOST in the test run.
190 Value of HTTP_HOST in the test run.
191 """
191 """
192 return 'example.com:80'
192 return 'example.com:80'
193
193
194
194
195 @pytest.fixture
195 @pytest.fixture()
196 def http_host_stub():
196 def http_host_stub():
197 """
197 """
198 Value of HTTP_HOST in the test run.
198 Value of HTTP_HOST in the test run.
199 """
199 """
200 return plain_http_host_stub()
200 return plain_http_host_stub()
201
201
202
202
203 def plain_http_host_only_stub():
203 def plain_http_host_only_stub():
204 """
204 """
205 Value of HTTP_HOST in the test run.
205 Value of HTTP_HOST in the test run.
206 """
206 """
207 return plain_http_host_stub().split(':')[0]
207 return plain_http_host_stub().split(':')[0]
208
208
209
209
210 @pytest.fixture
210 @pytest.fixture()
211 def http_host_only_stub():
211 def http_host_only_stub():
212 """
212 """
213 Value of HTTP_HOST in the test run.
213 Value of HTTP_HOST in the test run.
214 """
214 """
215 return plain_http_host_only_stub()
215 return plain_http_host_only_stub()
216
216
217
217
218 def plain_http_environ():
218 def plain_http_environ():
219 """
219 """
220 HTTP extra environ keys.
220 HTTP extra environ keys.
221
221
222 Used by the test application as well as for setting up the pylons
222 Used by the test application as well as for setting up the pylons
223 environment. In the case of the fixture "app" it should be possible
223 environment. In the case of the fixture "app" it should be possible
224 to override this for a specific test case.
224 to override this for a specific test case.
225 """
225 """
226 return {
226 return {
227 'SERVER_NAME': plain_http_host_only_stub(),
227 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_PORT': plain_http_host_stub().split(':')[1],
228 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'HTTP_HOST': plain_http_host_stub(),
229 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_USER_AGENT': 'rc-test-agent',
230 'HTTP_USER_AGENT': 'rc-test-agent',
231 'REQUEST_METHOD': 'GET'
231 'REQUEST_METHOD': 'GET'
232 }
232 }
233
233
234
234
235 @pytest.fixture
235 @pytest.fixture()
236 def http_environ():
236 def http_environ():
237 """
237 """
238 HTTP extra environ keys.
238 HTTP extra environ keys.
239
239
240 Used by the test application as well as for setting up the pylons
240 Used by the test application as well as for setting up the pylons
241 environment. In the case of the fixture "app" it should be possible
241 environment. In the case of the fixture "app" it should be possible
242 to override this for a specific test case.
242 to override this for a specific test case.
243 """
243 """
244 return plain_http_environ()
244 return plain_http_environ()
245
245
246
246
247 @pytest.fixture(scope='session')
247 @pytest.fixture(scope='session')
248 def baseapp(ini_config, vcsserver, http_environ_session):
248 def baseapp(ini_config, vcsserver, http_environ_session):
249 from rhodecode.lib.pyramid_utils import get_app_config
249 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.config.middleware import make_pyramid_app
250 from rhodecode.config.middleware import make_pyramid_app
251
251
252 print("Using the RhodeCode configuration:{}".format(ini_config))
252 print("Using the RhodeCode configuration:{}".format(ini_config))
253 pyramid.paster.setup_logging(ini_config)
253 pyramid.paster.setup_logging(ini_config)
254
254
255 settings = get_app_config(ini_config)
255 settings = get_app_config(ini_config)
256 app = make_pyramid_app({'__file__': ini_config}, **settings)
256 app = make_pyramid_app({'__file__': ini_config}, **settings)
257
257
258 return app
258 return app
259
259
260
260
261 @pytest.fixture(scope='function')
261 @pytest.fixture(scope='function')
262 def app(request, config_stub, baseapp, http_environ):
262 def app(request, config_stub, baseapp, http_environ):
263 app = CustomTestApp(
263 app = CustomTestApp(
264 baseapp,
264 baseapp,
265 extra_environ=http_environ)
265 extra_environ=http_environ)
266 if request.cls:
266 if request.cls:
267 request.cls.app = app
267 request.cls.app = app
268 return app
268 return app
269
269
270
270
271 @pytest.fixture(scope='session')
271 @pytest.fixture(scope='session')
272 def app_settings(baseapp, ini_config):
272 def app_settings(baseapp, ini_config):
273 """
273 """
274 Settings dictionary used to create the app.
274 Settings dictionary used to create the app.
275
275
276 Parses the ini file and passes the result through the sanitize and apply
276 Parses the ini file and passes the result through the sanitize and apply
277 defaults mechanism in `rhodecode.config.middleware`.
277 defaults mechanism in `rhodecode.config.middleware`.
278 """
278 """
279 return baseapp.config.get_settings()
279 return baseapp.config.get_settings()
280
280
281
281
282 @pytest.fixture(scope='session')
282 @pytest.fixture(scope='session')
283 def db_connection(ini_settings):
283 def db_connection(ini_settings):
284 # Initialize the database connection.
284 # Initialize the database connection.
285 config_utils.initialize_database(ini_settings)
285 config_utils.initialize_database(ini_settings)
286
286
287
287
288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289
289
290
290
291 def _autologin_user(app, *args):
291 def _autologin_user(app, *args):
292 session = login_user_session(app, *args)
292 session = login_user_session(app, *args)
293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 return LoginData(csrf_token, session['rhodecode_user'])
294 return LoginData(csrf_token, session['rhodecode_user'])
295
295
296
296
297 @pytest.fixture
297 @pytest.fixture()
298 def autologin_user(app):
298 def autologin_user(app):
299 """
299 """
300 Utility fixture which makes sure that the admin user is logged in
300 Utility fixture which makes sure that the admin user is logged in
301 """
301 """
302 return _autologin_user(app)
302 return _autologin_user(app)
303
303
304
304
305 @pytest.fixture
305 @pytest.fixture()
306 def autologin_regular_user(app):
306 def autologin_regular_user(app):
307 """
307 """
308 Utility fixture which makes sure that the regular user is logged in
308 Utility fixture which makes sure that the regular user is logged in
309 """
309 """
310 return _autologin_user(
310 return _autologin_user(
311 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
311 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312
312
313
313
314 @pytest.fixture(scope='function')
314 @pytest.fixture(scope='function')
315 def csrf_token(request, autologin_user):
315 def csrf_token(request, autologin_user):
316 return autologin_user.csrf_token
316 return autologin_user.csrf_token
317
317
318
318
319 @pytest.fixture(scope='function')
319 @pytest.fixture(scope='function')
320 def xhr_header(request):
320 def xhr_header(request):
321 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
321 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322
322
323
323
324 @pytest.fixture
324 @pytest.fixture()
325 def real_crypto_backend(monkeypatch):
325 def real_crypto_backend(monkeypatch):
326 """
326 """
327 Switch the production crypto backend on for this test.
327 Switch the production crypto backend on for this test.
328
328
329 During the test run the crypto backend is replaced with a faster
329 During the test run the crypto backend is replaced with a faster
330 implementation based on the MD5 algorithm.
330 implementation based on the MD5 algorithm.
331 """
331 """
332 monkeypatch.setattr(rhodecode, 'is_test', False)
332 monkeypatch.setattr(rhodecode, 'is_test', False)
333
333
334
334
335 @pytest.fixture(scope='class')
335 @pytest.fixture(scope='class')
336 def index_location(request, baseapp):
336 def index_location(request, baseapp):
337 index_location = baseapp.config.get_settings()['search.location']
337 index_location = baseapp.config.get_settings()['search.location']
338 if request.cls:
338 if request.cls:
339 request.cls.index_location = index_location
339 request.cls.index_location = index_location
340 return index_location
340 return index_location
341
341
342
342
343 @pytest.fixture(scope='session', autouse=True)
343 @pytest.fixture(scope='session', autouse=True)
344 def tests_tmp_path(request):
344 def tests_tmp_path(request):
345 """
345 """
346 Create temporary directory to be used during the test session.
346 Create temporary directory to be used during the test session.
347 """
347 """
348 if not os.path.exists(TESTS_TMP_PATH):
348 if not os.path.exists(TESTS_TMP_PATH):
349 os.makedirs(TESTS_TMP_PATH)
349 os.makedirs(TESTS_TMP_PATH)
350
350
351 if not request.config.getoption('--keep-tmp-path'):
351 if not request.config.getoption('--keep-tmp-path'):
352 @request.addfinalizer
352 @request.addfinalizer
353 def remove_tmp_path():
353 def remove_tmp_path():
354 shutil.rmtree(TESTS_TMP_PATH)
354 shutil.rmtree(TESTS_TMP_PATH)
355
355
356 return TESTS_TMP_PATH
356 return TESTS_TMP_PATH
357
357
358
358
359 @pytest.fixture
359 @pytest.fixture()
360 def test_repo_group(request):
360 def test_repo_group(request):
361 """
361 """
362 Create a temporary repository group, and destroy it after
362 Create a temporary repository group, and destroy it after
363 usage automatically
363 usage automatically
364 """
364 """
365 fixture = Fixture()
365 fixture = Fixture()
366 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
366 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
367 repo_group = fixture.create_repo_group(repogroupid)
367 repo_group = fixture.create_repo_group(repogroupid)
368
368
369 def _cleanup():
369 def _cleanup():
370 fixture.destroy_repo_group(repogroupid)
370 fixture.destroy_repo_group(repogroupid)
371
371
372 request.addfinalizer(_cleanup)
372 request.addfinalizer(_cleanup)
373 return repo_group
373 return repo_group
374
374
375
375
376 @pytest.fixture
376 @pytest.fixture()
377 def test_user_group(request):
377 def test_user_group(request):
378 """
378 """
379 Create a temporary user group, and destroy it after
379 Create a temporary user group, and destroy it after
380 usage automatically
380 usage automatically
381 """
381 """
382 fixture = Fixture()
382 fixture = Fixture()
383 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
383 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
384 user_group = fixture.create_user_group(usergroupid)
384 user_group = fixture.create_user_group(usergroupid)
385
385
386 def _cleanup():
386 def _cleanup():
387 fixture.destroy_user_group(user_group)
387 fixture.destroy_user_group(user_group)
388
388
389 request.addfinalizer(_cleanup)
389 request.addfinalizer(_cleanup)
390 return user_group
390 return user_group
391
391
392
392
393 @pytest.fixture(scope='session')
393 @pytest.fixture(scope='session')
394 def test_repo(request):
394 def test_repo(request):
395 container = TestRepoContainer()
395 container = TestRepoContainer()
396 request.addfinalizer(container._cleanup)
396 request.addfinalizer(container._cleanup)
397 return container
397 return container
398
398
399
399
400 class TestRepoContainer(object):
400 class TestRepoContainer(object):
401 """
401 """
402 Container for test repositories which are used read only.
402 Container for test repositories which are used read only.
403
403
404 Repositories will be created on demand and re-used during the lifetime
404 Repositories will be created on demand and re-used during the lifetime
405 of this object.
405 of this object.
406
406
407 Usage to get the svn test repository "minimal"::
407 Usage to get the svn test repository "minimal"::
408
408
409 test_repo = TestRepoContainer()
409 test_repo = TestRepoContainer()
410 repo = test_repo('minimal', 'svn')
410 repo = test_repo('minimal', 'svn')
411
411
412 """
412 """
413
413
414 dump_extractors = {
414 dump_extractors = {
415 'git': utils.extract_git_repo_from_dump,
415 'git': utils.extract_git_repo_from_dump,
416 'hg': utils.extract_hg_repo_from_dump,
416 'hg': utils.extract_hg_repo_from_dump,
417 'svn': utils.extract_svn_repo_from_dump,
417 'svn': utils.extract_svn_repo_from_dump,
418 }
418 }
419
419
420 def __init__(self):
420 def __init__(self):
421 self._cleanup_repos = []
421 self._cleanup_repos = []
422 self._fixture = Fixture()
422 self._fixture = Fixture()
423 self._repos = {}
423 self._repos = {}
424
424
425 def __call__(self, dump_name, backend_alias, config=None):
425 def __call__(self, dump_name, backend_alias, config=None):
426 key = (dump_name, backend_alias)
426 key = (dump_name, backend_alias)
427 if key not in self._repos:
427 if key not in self._repos:
428 repo = self._create_repo(dump_name, backend_alias, config)
428 repo = self._create_repo(dump_name, backend_alias, config)
429 self._repos[key] = repo.repo_id
429 self._repos[key] = repo.repo_id
430 return Repository.get(self._repos[key])
430 return Repository.get(self._repos[key])
431
431
432 def _create_repo(self, dump_name, backend_alias, config):
432 def _create_repo(self, dump_name, backend_alias, config):
433 repo_name = '%s-%s' % (backend_alias, dump_name)
433 repo_name = '%s-%s' % (backend_alias, dump_name)
434 backend = get_backend(backend_alias)
434 backend = get_backend(backend_alias)
435 dump_extractor = self.dump_extractors[backend_alias]
435 dump_extractor = self.dump_extractors[backend_alias]
436 repo_path = dump_extractor(dump_name, repo_name)
436 repo_path = dump_extractor(dump_name, repo_name)
437
437
438 vcs_repo = backend(repo_path, config=config)
438 vcs_repo = backend(repo_path, config=config)
439 repo2db_mapper({repo_name: vcs_repo})
439 repo2db_mapper({repo_name: vcs_repo})
440
440
441 repo = RepoModel().get_by_repo_name(repo_name)
441 repo = RepoModel().get_by_repo_name(repo_name)
442 self._cleanup_repos.append(repo_name)
442 self._cleanup_repos.append(repo_name)
443 return repo
443 return repo
444
444
445 def _cleanup(self):
445 def _cleanup(self):
446 for repo_name in reversed(self._cleanup_repos):
446 for repo_name in reversed(self._cleanup_repos):
447 self._fixture.destroy_repo(repo_name)
447 self._fixture.destroy_repo(repo_name)
448
448
449
449
450 def backend_base(request, backend_alias, baseapp, test_repo):
450 def backend_base(request, backend_alias, baseapp, test_repo):
451 if backend_alias not in request.config.getoption('--backends'):
451 if backend_alias not in request.config.getoption('--backends'):
452 pytest.skip("Backend %s not selected." % (backend_alias, ))
452 pytest.skip("Backend %s not selected." % (backend_alias, ))
453
453
454 utils.check_xfail_backends(request.node, backend_alias)
454 utils.check_xfail_backends(request.node, backend_alias)
455 utils.check_skip_backends(request.node, backend_alias)
455 utils.check_skip_backends(request.node, backend_alias)
456
456
457 repo_name = 'vcs_test_%s' % (backend_alias, )
457 repo_name = 'vcs_test_%s' % (backend_alias, )
458 backend = Backend(
458 backend = Backend(
459 alias=backend_alias,
459 alias=backend_alias,
460 repo_name=repo_name,
460 repo_name=repo_name,
461 test_name=request.node.name,
461 test_name=request.node.name,
462 test_repo_container=test_repo)
462 test_repo_container=test_repo)
463 request.addfinalizer(backend.cleanup)
463 request.addfinalizer(backend.cleanup)
464 return backend
464 return backend
465
465
466
466
467 @pytest.fixture
467 @pytest.fixture()
468 def backend(request, backend_alias, baseapp, test_repo):
468 def backend(request, backend_alias, baseapp, test_repo):
469 """
469 """
470 Parametrized fixture which represents a single backend implementation.
470 Parametrized fixture which represents a single backend implementation.
471
471
472 It respects the option `--backends` to focus the test run on specific
472 It respects the option `--backends` to focus the test run on specific
473 backend implementations.
473 backend implementations.
474
474
475 It also supports `pytest.mark.xfail_backends` to mark tests as failing
475 It also supports `pytest.mark.xfail_backends` to mark tests as failing
476 for specific backends. This is intended as a utility for incremental
476 for specific backends. This is intended as a utility for incremental
477 development of a new backend implementation.
477 development of a new backend implementation.
478 """
478 """
479 return backend_base(request, backend_alias, baseapp, test_repo)
479 return backend_base(request, backend_alias, baseapp, test_repo)
480
480
481
481
482 @pytest.fixture
482 @pytest.fixture()
483 def backend_git(request, baseapp, test_repo):
483 def backend_git(request, baseapp, test_repo):
484 return backend_base(request, 'git', baseapp, test_repo)
484 return backend_base(request, 'git', baseapp, test_repo)
485
485
486
486
487 @pytest.fixture
487 @pytest.fixture()
488 def backend_hg(request, baseapp, test_repo):
488 def backend_hg(request, baseapp, test_repo):
489 return backend_base(request, 'hg', baseapp, test_repo)
489 return backend_base(request, 'hg', baseapp, test_repo)
490
490
491
491
492 @pytest.fixture
492 @pytest.fixture()
493 def backend_svn(request, baseapp, test_repo):
493 def backend_svn(request, baseapp, test_repo):
494 return backend_base(request, 'svn', baseapp, test_repo)
494 return backend_base(request, 'svn', baseapp, test_repo)
495
495
496
496
497 @pytest.fixture
497 @pytest.fixture()
498 def backend_random(backend_git):
498 def backend_random(backend_git):
499 """
499 """
500 Use this to express that your tests need "a backend".
500 Use this to express that your tests need "a backend".
501
501
502 A few of our tests need a backend, so that we can run the code. This
502 A few of our tests need a backend, so that we can run the code. This
503 fixture is intended to be used for such cases. It will pick one of the
503 fixture is intended to be used for such cases. It will pick one of the
504 backends and run the tests.
504 backends and run the tests.
505
505
506 The fixture `backend` would run the test multiple times for each
506 The fixture `backend` would run the test multiple times for each
507 available backend which is a pure waste of time if the test is
507 available backend which is a pure waste of time if the test is
508 independent of the backend type.
508 independent of the backend type.
509 """
509 """
510 # TODO: johbo: Change this to pick a random backend
510 # TODO: johbo: Change this to pick a random backend
511 return backend_git
511 return backend_git
512
512
513
513
514 @pytest.fixture
514 @pytest.fixture()
515 def backend_stub(backend_git):
515 def backend_stub(backend_git):
516 """
516 """
517 Use this to express that your tests need a backend stub
517 Use this to express that your tests need a backend stub
518
518
519 TODO: mikhail: Implement a real stub logic instead of returning
519 TODO: mikhail: Implement a real stub logic instead of returning
520 a git backend
520 a git backend
521 """
521 """
522 return backend_git
522 return backend_git
523
523
524
524
525 @pytest.fixture
525 @pytest.fixture()
526 def repo_stub(backend_stub):
526 def repo_stub(backend_stub):
527 """
527 """
528 Use this to express that your tests need a repository stub
528 Use this to express that your tests need a repository stub
529 """
529 """
530 return backend_stub.create_repo()
530 return backend_stub.create_repo()
531
531
532
532
533 class Backend(object):
533 class Backend(object):
534 """
534 """
535 Represents the test configuration for one supported backend
535 Represents the test configuration for one supported backend
536
536
537 Provides easy access to different test repositories based on
537 Provides easy access to different test repositories based on
538 `__getitem__`. Such repositories will only be created once per test
538 `__getitem__`. Such repositories will only be created once per test
539 session.
539 session.
540 """
540 """
541
541
542 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
542 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
543 _master_repo = None
543 _master_repo = None
544 _commit_ids = {}
544 _commit_ids = {}
545
545
546 def __init__(self, alias, repo_name, test_name, test_repo_container):
546 def __init__(self, alias, repo_name, test_name, test_repo_container):
547 self.alias = alias
547 self.alias = alias
548 self.repo_name = repo_name
548 self.repo_name = repo_name
549 self._cleanup_repos = []
549 self._cleanup_repos = []
550 self._test_name = test_name
550 self._test_name = test_name
551 self._test_repo_container = test_repo_container
551 self._test_repo_container = test_repo_container
552 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
552 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
553 # Fixture will survive in the end.
553 # Fixture will survive in the end.
554 self._fixture = Fixture()
554 self._fixture = Fixture()
555
555
556 def __getitem__(self, key):
556 def __getitem__(self, key):
557 return self._test_repo_container(key, self.alias)
557 return self._test_repo_container(key, self.alias)
558
558
559 def create_test_repo(self, key, config=None):
559 def create_test_repo(self, key, config=None):
560 return self._test_repo_container(key, self.alias, config)
560 return self._test_repo_container(key, self.alias, config)
561
561
562 @property
562 @property
563 def repo(self):
563 def repo(self):
564 """
564 """
565 Returns the "current" repository. This is the vcs_test repo or the
565 Returns the "current" repository. This is the vcs_test repo or the
566 last repo which has been created with `create_repo`.
566 last repo which has been created with `create_repo`.
567 """
567 """
568 from rhodecode.model.db import Repository
568 from rhodecode.model.db import Repository
569 return Repository.get_by_repo_name(self.repo_name)
569 return Repository.get_by_repo_name(self.repo_name)
570
570
571 @property
571 @property
572 def default_branch_name(self):
572 def default_branch_name(self):
573 VcsRepository = get_backend(self.alias)
573 VcsRepository = get_backend(self.alias)
574 return VcsRepository.DEFAULT_BRANCH_NAME
574 return VcsRepository.DEFAULT_BRANCH_NAME
575
575
576 @property
576 @property
577 def default_head_id(self):
577 def default_head_id(self):
578 """
578 """
579 Returns the default head id of the underlying backend.
579 Returns the default head id of the underlying backend.
580
580
581 This will be the default branch name in case the backend does have a
581 This will be the default branch name in case the backend does have a
582 default branch. In the other cases it will point to a valid head
582 default branch. In the other cases it will point to a valid head
583 which can serve as the base to create a new commit on top of it.
583 which can serve as the base to create a new commit on top of it.
584 """
584 """
585 vcsrepo = self.repo.scm_instance()
585 vcsrepo = self.repo.scm_instance()
586 head_id = (
586 head_id = (
587 vcsrepo.DEFAULT_BRANCH_NAME or
587 vcsrepo.DEFAULT_BRANCH_NAME or
588 vcsrepo.commit_ids[-1])
588 vcsrepo.commit_ids[-1])
589 return head_id
589 return head_id
590
590
591 @property
591 @property
592 def commit_ids(self):
592 def commit_ids(self):
593 """
593 """
594 Returns the list of commits for the last created repository
594 Returns the list of commits for the last created repository
595 """
595 """
596 return self._commit_ids
596 return self._commit_ids
597
597
598 def create_master_repo(self, commits):
598 def create_master_repo(self, commits):
599 """
599 """
600 Create a repository and remember it as a template.
600 Create a repository and remember it as a template.
601
601
602 This allows you to easily create derived repositories to construct
602 This allows you to easily create derived repositories to construct
603 more complex scenarios for diff, compare and pull requests.
603 more complex scenarios for diff, compare and pull requests.
604
604
605 Returns a commit map which maps from commit message to raw_id.
605 Returns a commit map which maps from commit message to raw_id.
606 """
606 """
607 self._master_repo = self.create_repo(commits=commits)
607 self._master_repo = self.create_repo(commits=commits)
608 return self._commit_ids
608 return self._commit_ids
609
609
610 def create_repo(
610 def create_repo(
611 self, commits=None, number_of_commits=0, heads=None,
611 self, commits=None, number_of_commits=0, heads=None,
612 name_suffix=u'', bare=False, **kwargs):
612 name_suffix=u'', bare=False, **kwargs):
613 """
613 """
614 Create a repository and record it for later cleanup.
614 Create a repository and record it for later cleanup.
615
615
616 :param commits: Optional. A sequence of dict instances.
616 :param commits: Optional. A sequence of dict instances.
617 Will add a commit per entry to the new repository.
617 Will add a commit per entry to the new repository.
618 :param number_of_commits: Optional. If set to a number, this number of
618 :param number_of_commits: Optional. If set to a number, this number of
619 commits will be added to the new repository.
619 commits will be added to the new repository.
620 :param heads: Optional. Can be set to a sequence of commit
620 :param heads: Optional. Can be set to a sequence of commit
621 names which shall be pulled in from the master repository.
621 names which shall be pulled in from the master repository.
622 :param name_suffix: adds special suffix to generated repo name
622 :param name_suffix: adds special suffix to generated repo name
623 :param bare: set a repo as bare (no checkout)
623 :param bare: set a repo as bare (no checkout)
624 """
624 """
625 self.repo_name = self._next_repo_name() + name_suffix
625 self.repo_name = self._next_repo_name() + name_suffix
626 repo = self._fixture.create_repo(
626 repo = self._fixture.create_repo(
627 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
627 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
628 self._cleanup_repos.append(repo.repo_name)
628 self._cleanup_repos.append(repo.repo_name)
629
629
630 commits = commits or [
630 commits = commits or [
631 {'message': 'Commit %s of %s' % (x, self.repo_name)}
631 {'message': 'Commit %s of %s' % (x, self.repo_name)}
632 for x in range(number_of_commits)]
632 for x in range(number_of_commits)]
633 vcs_repo = repo.scm_instance()
633 vcs_repo = repo.scm_instance()
634 vcs_repo.count()
634 vcs_repo.count()
635 self._add_commits_to_repo(vcs_repo, commits)
635 self._add_commits_to_repo(vcs_repo, commits)
636 if heads:
636 if heads:
637 self.pull_heads(repo, heads)
637 self.pull_heads(repo, heads)
638
638
639 return repo
639 return repo
640
640
641 def pull_heads(self, repo, heads):
641 def pull_heads(self, repo, heads):
642 """
642 """
643 Make sure that repo contains all commits mentioned in `heads`
643 Make sure that repo contains all commits mentioned in `heads`
644 """
644 """
645 vcsmaster = self._master_repo.scm_instance()
645 vcsmaster = self._master_repo.scm_instance()
646 vcsrepo = repo.scm_instance()
646 vcsrepo = repo.scm_instance()
647 vcsrepo.config.clear_section('hooks')
647 vcsrepo.config.clear_section('hooks')
648 commit_ids = [self._commit_ids[h] for h in heads]
648 commit_ids = [self._commit_ids[h] for h in heads]
649 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
649 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
650
650
651 def create_fork(self):
651 def create_fork(self):
652 repo_to_fork = self.repo_name
652 repo_to_fork = self.repo_name
653 self.repo_name = self._next_repo_name()
653 self.repo_name = self._next_repo_name()
654 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
654 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
655 self._cleanup_repos.append(self.repo_name)
655 self._cleanup_repos.append(self.repo_name)
656 return repo
656 return repo
657
657
658 def new_repo_name(self, suffix=u''):
658 def new_repo_name(self, suffix=u''):
659 self.repo_name = self._next_repo_name() + suffix
659 self.repo_name = self._next_repo_name() + suffix
660 self._cleanup_repos.append(self.repo_name)
660 self._cleanup_repos.append(self.repo_name)
661 return self.repo_name
661 return self.repo_name
662
662
663 def _next_repo_name(self):
663 def _next_repo_name(self):
664 return u"%s_%s" % (
664 return u"%s_%s" % (
665 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
665 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
666
666
667 def ensure_file(self, filename, content='Test content\n'):
667 def ensure_file(self, filename, content='Test content\n'):
668 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
668 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
669 commits = [
669 commits = [
670 {'added': [
670 {'added': [
671 FileNode(filename, content=content),
671 FileNode(filename, content=content),
672 ]},
672 ]},
673 ]
673 ]
674 self._add_commits_to_repo(self.repo.scm_instance(), commits)
674 self._add_commits_to_repo(self.repo.scm_instance(), commits)
675
675
676 def enable_downloads(self):
676 def enable_downloads(self):
677 repo = self.repo
677 repo = self.repo
678 repo.enable_downloads = True
678 repo.enable_downloads = True
679 Session().add(repo)
679 Session().add(repo)
680 Session().commit()
680 Session().commit()
681
681
682 def cleanup(self):
682 def cleanup(self):
683 for repo_name in reversed(self._cleanup_repos):
683 for repo_name in reversed(self._cleanup_repos):
684 self._fixture.destroy_repo(repo_name)
684 self._fixture.destroy_repo(repo_name)
685
685
686 def _add_commits_to_repo(self, repo, commits):
686 def _add_commits_to_repo(self, repo, commits):
687 commit_ids = _add_commits_to_repo(repo, commits)
687 commit_ids = _add_commits_to_repo(repo, commits)
688 if not commit_ids:
688 if not commit_ids:
689 return
689 return
690 self._commit_ids = commit_ids
690 self._commit_ids = commit_ids
691
691
692 # Creating refs for Git to allow fetching them from remote repository
692 # Creating refs for Git to allow fetching them from remote repository
693 if self.alias == 'git':
693 if self.alias == 'git':
694 refs = {}
694 refs = {}
695 for message in self._commit_ids:
695 for message in self._commit_ids:
696 # TODO: mikhail: do more special chars replacements
696 # TODO: mikhail: do more special chars replacements
697 ref_name = 'refs/test-refs/{}'.format(
697 ref_name = 'refs/test-refs/{}'.format(
698 message.replace(' ', ''))
698 message.replace(' ', ''))
699 refs[ref_name] = self._commit_ids[message]
699 refs[ref_name] = self._commit_ids[message]
700 self._create_refs(repo, refs)
700 self._create_refs(repo, refs)
701
701
702 def _create_refs(self, repo, refs):
702 def _create_refs(self, repo, refs):
703 for ref_name in refs:
703 for ref_name in refs:
704 repo.set_refs(ref_name, refs[ref_name])
704 repo.set_refs(ref_name, refs[ref_name])
705
705
706
706
707 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
707 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
708 if backend_alias not in request.config.getoption('--backends'):
708 if backend_alias not in request.config.getoption('--backends'):
709 pytest.skip("Backend %s not selected." % (backend_alias, ))
709 pytest.skip("Backend %s not selected." % (backend_alias, ))
710
710
711 utils.check_xfail_backends(request.node, backend_alias)
711 utils.check_xfail_backends(request.node, backend_alias)
712 utils.check_skip_backends(request.node, backend_alias)
712 utils.check_skip_backends(request.node, backend_alias)
713
713
714 repo_name = 'vcs_test_%s' % (backend_alias, )
714 repo_name = 'vcs_test_%s' % (backend_alias, )
715 repo_path = os.path.join(tests_tmp_path, repo_name)
715 repo_path = os.path.join(tests_tmp_path, repo_name)
716 backend = VcsBackend(
716 backend = VcsBackend(
717 alias=backend_alias,
717 alias=backend_alias,
718 repo_path=repo_path,
718 repo_path=repo_path,
719 test_name=request.node.name,
719 test_name=request.node.name,
720 test_repo_container=test_repo)
720 test_repo_container=test_repo)
721 request.addfinalizer(backend.cleanup)
721 request.addfinalizer(backend.cleanup)
722 return backend
722 return backend
723
723
724
724
725 @pytest.fixture
725 @pytest.fixture()
726 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
726 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
727 """
727 """
728 Parametrized fixture which represents a single vcs backend implementation.
728 Parametrized fixture which represents a single vcs backend implementation.
729
729
730 See the fixture `backend` for more details. This one implements the same
730 See the fixture `backend` for more details. This one implements the same
731 concept, but on vcs level. So it does not provide model instances etc.
731 concept, but on vcs level. So it does not provide model instances etc.
732
732
733 Parameters are generated dynamically, see :func:`pytest_generate_tests`
733 Parameters are generated dynamically, see :func:`pytest_generate_tests`
734 for how this works.
734 for how this works.
735 """
735 """
736 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
737
737
738
738
739 @pytest.fixture
739 @pytest.fixture()
740 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
740 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
741 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
742
742
743
743
744 @pytest.fixture
744 @pytest.fixture()
745 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
745 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
746 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
747
747
748
748
749 @pytest.fixture
749 @pytest.fixture()
750 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
750 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
751 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
752
752
753
753
754 @pytest.fixture
754 @pytest.fixture()
755 def vcsbackend_stub(vcsbackend_git):
755 def vcsbackend_stub(vcsbackend_git):
756 """
756 """
757 Use this to express that your test just needs a stub of a vcsbackend.
757 Use this to express that your test just needs a stub of a vcsbackend.
758
758
759 The plan is to eventually implement an in-memory stub to speed tests up.
759 The plan is to eventually implement an in-memory stub to speed tests up.
760 """
760 """
761 return vcsbackend_git
761 return vcsbackend_git
762
762
763
763
764 class VcsBackend(object):
764 class VcsBackend(object):
765 """
765 """
766 Represents the test configuration for one supported vcs backend.
766 Represents the test configuration for one supported vcs backend.
767 """
767 """
768
768
769 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
769 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
770
770
771 def __init__(self, alias, repo_path, test_name, test_repo_container):
771 def __init__(self, alias, repo_path, test_name, test_repo_container):
772 self.alias = alias
772 self.alias = alias
773 self._repo_path = repo_path
773 self._repo_path = repo_path
774 self._cleanup_repos = []
774 self._cleanup_repos = []
775 self._test_name = test_name
775 self._test_name = test_name
776 self._test_repo_container = test_repo_container
776 self._test_repo_container = test_repo_container
777
777
778 def __getitem__(self, key):
778 def __getitem__(self, key):
779 return self._test_repo_container(key, self.alias).scm_instance()
779 return self._test_repo_container(key, self.alias).scm_instance()
780
780
781 @property
781 @property
782 def repo(self):
782 def repo(self):
783 """
783 """
784 Returns the "current" repository. This is the vcs_test repo or the last
784 Returns the "current" repository. This is the vcs_test repo or the last
785 repo which has been created.
785 repo which has been created.
786 """
786 """
787 Repository = get_backend(self.alias)
787 Repository = get_backend(self.alias)
788 return Repository(self._repo_path)
788 return Repository(self._repo_path)
789
789
790 @property
790 @property
791 def backend(self):
791 def backend(self):
792 """
792 """
793 Returns the backend implementation class.
793 Returns the backend implementation class.
794 """
794 """
795 return get_backend(self.alias)
795 return get_backend(self.alias)
796
796
797 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
797 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
798 bare=False):
798 bare=False):
799 repo_name = self._next_repo_name()
799 repo_name = self._next_repo_name()
800 self._repo_path = get_new_dir(repo_name)
800 self._repo_path = get_new_dir(repo_name)
801 repo_class = get_backend(self.alias)
801 repo_class = get_backend(self.alias)
802 src_url = None
802 src_url = None
803 if _clone_repo:
803 if _clone_repo:
804 src_url = _clone_repo.path
804 src_url = _clone_repo.path
805 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
805 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
806 self._cleanup_repos.append(repo)
806 self._cleanup_repos.append(repo)
807
807
808 commits = commits or [
808 commits = commits or [
809 {'message': 'Commit %s of %s' % (x, repo_name)}
809 {'message': 'Commit %s of %s' % (x, repo_name)}
810 for x in xrange(number_of_commits)]
810 for x in xrange(number_of_commits)]
811 _add_commits_to_repo(repo, commits)
811 _add_commits_to_repo(repo, commits)
812 return repo
812 return repo
813
813
814 def clone_repo(self, repo):
814 def clone_repo(self, repo):
815 return self.create_repo(_clone_repo=repo)
815 return self.create_repo(_clone_repo=repo)
816
816
817 def cleanup(self):
817 def cleanup(self):
818 for repo in self._cleanup_repos:
818 for repo in self._cleanup_repos:
819 shutil.rmtree(repo.path)
819 shutil.rmtree(repo.path)
820
820
821 def new_repo_path(self):
821 def new_repo_path(self):
822 repo_name = self._next_repo_name()
822 repo_name = self._next_repo_name()
823 self._repo_path = get_new_dir(repo_name)
823 self._repo_path = get_new_dir(repo_name)
824 return self._repo_path
824 return self._repo_path
825
825
826 def _next_repo_name(self):
826 def _next_repo_name(self):
827 return "%s_%s" % (
827 return "%s_%s" % (
828 self.invalid_repo_name.sub('_', self._test_name),
828 self.invalid_repo_name.sub('_', self._test_name),
829 len(self._cleanup_repos))
829 len(self._cleanup_repos))
830
830
831 def add_file(self, repo, filename, content='Test content\n'):
831 def add_file(self, repo, filename, content='Test content\n'):
832 imc = repo.in_memory_commit
832 imc = repo.in_memory_commit
833 imc.add(FileNode(filename, content=content))
833 imc.add(FileNode(filename, content=content))
834 imc.commit(
834 imc.commit(
835 message=u'Automatic commit from vcsbackend fixture',
835 message=u'Automatic commit from vcsbackend fixture',
836 author=u'Automatic <automatic@rhodecode.com>')
836 author=u'Automatic <automatic@rhodecode.com>')
837
837
838 def ensure_file(self, filename, content='Test content\n'):
838 def ensure_file(self, filename, content='Test content\n'):
839 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
839 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
840 self.add_file(self.repo, filename, content)
840 self.add_file(self.repo, filename, content)
841
841
842
842
843 def _add_commits_to_repo(vcs_repo, commits):
843 def _add_commits_to_repo(vcs_repo, commits):
844 commit_ids = {}
844 commit_ids = {}
845 if not commits:
845 if not commits:
846 return commit_ids
846 return commit_ids
847
847
848 imc = vcs_repo.in_memory_commit
848 imc = vcs_repo.in_memory_commit
849 commit = None
849 commit = None
850
850
851 for idx, commit in enumerate(commits):
851 for idx, commit in enumerate(commits):
852 message = unicode(commit.get('message', 'Commit %s' % idx))
852 message = unicode(commit.get('message', 'Commit %s' % idx))
853
853
854 for node in commit.get('added', []):
854 for node in commit.get('added', []):
855 imc.add(FileNode(node.path, content=node.content))
855 imc.add(FileNode(node.path, content=node.content))
856 for node in commit.get('changed', []):
856 for node in commit.get('changed', []):
857 imc.change(FileNode(node.path, content=node.content))
857 imc.change(FileNode(node.path, content=node.content))
858 for node in commit.get('removed', []):
858 for node in commit.get('removed', []):
859 imc.remove(FileNode(node.path))
859 imc.remove(FileNode(node.path))
860
860
861 parents = [
861 parents = [
862 vcs_repo.get_commit(commit_id=commit_ids[p])
862 vcs_repo.get_commit(commit_id=commit_ids[p])
863 for p in commit.get('parents', [])]
863 for p in commit.get('parents', [])]
864
864
865 operations = ('added', 'changed', 'removed')
865 operations = ('added', 'changed', 'removed')
866 if not any((commit.get(o) for o in operations)):
866 if not any((commit.get(o) for o in operations)):
867 imc.add(FileNode('file_%s' % idx, content=message))
867 imc.add(FileNode('file_%s' % idx, content=message))
868
868
869 commit = imc.commit(
869 commit = imc.commit(
870 message=message,
870 message=message,
871 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
871 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
872 date=commit.get('date'),
872 date=commit.get('date'),
873 branch=commit.get('branch'),
873 branch=commit.get('branch'),
874 parents=parents)
874 parents=parents)
875
875
876 commit_ids[commit.message] = commit.raw_id
876 commit_ids[commit.message] = commit.raw_id
877
877
878 return commit_ids
878 return commit_ids
879
879
880
880
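Illustrative sketch only, not part of this changeset: the shape of the commit dictionaries accepted by create_repo and _add_commits_to_repo above. FileNode is the same node class already used by this module; file names, contents and the test name are made up, and a 'parents' entry refers back to an earlier commit message.

    def test_builds_history_from_commit_dicts(vcsbackend_git):
        commits = [
            {'message': 'Commit 0'},  # no nodes given, so a file_0 node is added automatically
            {'message': 'Commit 1',
             'added': [FileNode('setup.py', content='print("hello")\n')]},
            {'message': 'Commit 2',
             'parents': ['Commit 1'],
             'changed': [FileNode('setup.py', content='print("bye")\n')]},
        ]
        repo = vcsbackend_git.create_repo(commits=commits)
        assert len(repo.commit_ids) == 3
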
881 @pytest.fixture
881 @pytest.fixture()
882 def reposerver(request):
882 def reposerver(request):
883 """
883 """
884 Allows serving a backend repository
884 Allows serving a backend repository
885 """
885 """
886
886
887 repo_server = RepoServer()
887 repo_server = RepoServer()
888 request.addfinalizer(repo_server.cleanup)
888 request.addfinalizer(repo_server.cleanup)
889 return repo_server
889 return repo_server
890
890
891
891
892 class RepoServer(object):
892 class RepoServer(object):
893 """
893 """
894 Utility to serve a local repository for the duration of a test case.
894 Utility to serve a local repository for the duration of a test case.
895
895
896 Supports only Subversion so far.
896 Supports only Subversion so far.
897 """
897 """
898
898
899 url = None
899 url = None
900
900
901 def __init__(self):
901 def __init__(self):
902 self._cleanup_servers = []
902 self._cleanup_servers = []
903
903
904 def serve(self, vcsrepo):
904 def serve(self, vcsrepo):
905 if vcsrepo.alias != 'svn':
905 if vcsrepo.alias != 'svn':
906 raise TypeError("Backend %s not supported" % vcsrepo.alias)
906 raise TypeError("Backend %s not supported" % vcsrepo.alias)
907
907
908 proc = subprocess32.Popen(
908 proc = subprocess32.Popen(
909 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
909 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
910 '--root', vcsrepo.path])
910 '--root', vcsrepo.path])
911 self._cleanup_servers.append(proc)
911 self._cleanup_servers.append(proc)
912 self.url = 'svn://localhost'
912 self.url = 'svn://localhost'
913
913
914 def cleanup(self):
914 def cleanup(self):
915 for proc in self._cleanup_servers:
915 for proc in self._cleanup_servers:
916 proc.terminate()
916 proc.terminate()
917
917
918
918
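Illustrative sketch only, not part of this changeset: serving a Subversion repository through the reposerver fixture. It assumes an svn flavoured vcsbackend fixture (called vcsbackend_svn here) is available, and the test name is made up.

    def test_serves_svn_repository(reposerver, vcsbackend_svn):
        repo = vcsbackend_svn.create_repo(number_of_commits=1)
        reposerver.serve(repo)  # only 'svn' repositories are accepted
        assert reposerver.url == 'svn://localhost'
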
919 @pytest.fixture
919 @pytest.fixture()
920 def pr_util(backend, request, config_stub):
920 def pr_util(backend, request, config_stub):
921 """
921 """
922 Utility for tests of models and for functional tests around pull requests.
922 Utility for tests of models and for functional tests around pull requests.
923
923
924 It gives an instance of :class:`PRTestUtility` which provides various
924 It gives an instance of :class:`PRTestUtility` which provides various
925 utility methods around one pull request.
925 utility methods around one pull request.
926
926
927 This fixture uses `backend` and inherits its parameterization.
927 This fixture uses `backend` and inherits its parameterization.
928 """
928 """
929
929
930 util = PRTestUtility(backend)
930 util = PRTestUtility(backend)
931 request.addfinalizer(util.cleanup)
931 request.addfinalizer(util.cleanup)
932
932
933 return util
933 return util
934
934
935
935
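Illustrative sketch only, not part of this changeset: driving PRTestUtility through the pr_util fixture. The test name and argument values are examples.

    def test_pull_request_roundtrip(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        assert pull_request.pull_request_id == pr_util.pull_request_id
        pr_util.create_comment()
        pr_util.close()
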
936 class PRTestUtility(object):
936 class PRTestUtility(object):
937
937
938 pull_request = None
938 pull_request = None
939 pull_request_id = None
939 pull_request_id = None
940 mergeable_patcher = None
940 mergeable_patcher = None
941 mergeable_mock = None
941 mergeable_mock = None
942 notification_patcher = None
942 notification_patcher = None
943
943
944 def __init__(self, backend):
944 def __init__(self, backend):
945 self.backend = backend
945 self.backend = backend
946
946
947 def create_pull_request(
947 def create_pull_request(
948 self, commits=None, target_head=None, source_head=None,
948 self, commits=None, target_head=None, source_head=None,
949 revisions=None, approved=False, author=None, mergeable=False,
949 revisions=None, approved=False, author=None, mergeable=False,
950 enable_notifications=True, name_suffix=u'', reviewers=None,
950 enable_notifications=True, name_suffix=u'', reviewers=None,
951 title=u"Test", description=u"Description"):
951 title=u"Test", description=u"Description"):
952 self.set_mergeable(mergeable)
952 self.set_mergeable(mergeable)
953 if not enable_notifications:
953 if not enable_notifications:
954 # mock notification side effect
954 # mock notification side effect
955 self.notification_patcher = mock.patch(
955 self.notification_patcher = mock.patch(
956 'rhodecode.model.notification.NotificationModel.create')
956 'rhodecode.model.notification.NotificationModel.create')
957 self.notification_patcher.start()
957 self.notification_patcher.start()
958
958
959 if not self.pull_request:
959 if not self.pull_request:
960 if not commits:
960 if not commits:
961 commits = [
961 commits = [
962 {'message': 'c1'},
962 {'message': 'c1'},
963 {'message': 'c2'},
963 {'message': 'c2'},
964 {'message': 'c3'},
964 {'message': 'c3'},
965 ]
965 ]
966 target_head = 'c1'
966 target_head = 'c1'
967 source_head = 'c2'
967 source_head = 'c2'
968 revisions = ['c2']
968 revisions = ['c2']
969
969
970 self.commit_ids = self.backend.create_master_repo(commits)
970 self.commit_ids = self.backend.create_master_repo(commits)
971 self.target_repository = self.backend.create_repo(
971 self.target_repository = self.backend.create_repo(
972 heads=[target_head], name_suffix=name_suffix)
972 heads=[target_head], name_suffix=name_suffix)
973 self.source_repository = self.backend.create_repo(
973 self.source_repository = self.backend.create_repo(
974 heads=[source_head], name_suffix=name_suffix)
974 heads=[source_head], name_suffix=name_suffix)
975 self.author = author or UserModel().get_by_username(
975 self.author = author or UserModel().get_by_username(
976 TEST_USER_ADMIN_LOGIN)
976 TEST_USER_ADMIN_LOGIN)
977
977
978 model = PullRequestModel()
978 model = PullRequestModel()
979 self.create_parameters = {
979 self.create_parameters = {
980 'created_by': self.author,
980 'created_by': self.author,
981 'source_repo': self.source_repository.repo_name,
981 'source_repo': self.source_repository.repo_name,
982 'source_ref': self._default_branch_reference(source_head),
982 'source_ref': self._default_branch_reference(source_head),
983 'target_repo': self.target_repository.repo_name,
983 'target_repo': self.target_repository.repo_name,
984 'target_ref': self._default_branch_reference(target_head),
984 'target_ref': self._default_branch_reference(target_head),
985 'revisions': [self.commit_ids[r] for r in revisions],
985 'revisions': [self.commit_ids[r] for r in revisions],
986 'reviewers': reviewers or self._get_reviewers(),
986 'reviewers': reviewers or self._get_reviewers(),
987 'title': title,
987 'title': title,
988 'description': description,
988 'description': description,
989 }
989 }
990 self.pull_request = model.create(**self.create_parameters)
990 self.pull_request = model.create(**self.create_parameters)
991 assert model.get_versions(self.pull_request) == []
991 assert model.get_versions(self.pull_request) == []
992
992
993 self.pull_request_id = self.pull_request.pull_request_id
993 self.pull_request_id = self.pull_request.pull_request_id
994
994
995 if approved:
995 if approved:
996 self.approve()
996 self.approve()
997
997
998 Session().add(self.pull_request)
998 Session().add(self.pull_request)
999 Session().commit()
999 Session().commit()
1000
1000
1001 return self.pull_request
1001 return self.pull_request
1002
1002
1003 def approve(self):
1003 def approve(self):
1004 self.create_status_votes(
1004 self.create_status_votes(
1005 ChangesetStatus.STATUS_APPROVED,
1005 ChangesetStatus.STATUS_APPROVED,
1006 *self.pull_request.reviewers)
1006 *self.pull_request.reviewers)
1007
1007
1008 def close(self):
1008 def close(self):
1009 PullRequestModel().close_pull_request(self.pull_request, self.author)
1009 PullRequestModel().close_pull_request(self.pull_request, self.author)
1010
1010
1011 def _default_branch_reference(self, commit_message):
1011 def _default_branch_reference(self, commit_message):
1012 reference = '%s:%s:%s' % (
1012 reference = '%s:%s:%s' % (
1013 'branch',
1013 'branch',
1014 self.backend.default_branch_name,
1014 self.backend.default_branch_name,
1015 self.commit_ids[commit_message])
1015 self.commit_ids[commit_message])
1016 return reference
1016 return reference
1017
1017
1018 def _get_reviewers(self):
1018 def _get_reviewers(self):
1019 return [
1019 return [
1020 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1020 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1021 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1021 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1022 ]
1022 ]
1023
1023
1024 def update_source_repository(self, head=None):
1024 def update_source_repository(self, head=None):
1025 heads = [head or 'c3']
1025 heads = [head or 'c3']
1026 self.backend.pull_heads(self.source_repository, heads=heads)
1026 self.backend.pull_heads(self.source_repository, heads=heads)
1027
1027
1028 def add_one_commit(self, head=None):
1028 def add_one_commit(self, head=None):
1029 self.update_source_repository(head=head)
1029 self.update_source_repository(head=head)
1030 old_commit_ids = set(self.pull_request.revisions)
1030 old_commit_ids = set(self.pull_request.revisions)
1031 PullRequestModel().update_commits(self.pull_request)
1031 PullRequestModel().update_commits(self.pull_request)
1032 commit_ids = set(self.pull_request.revisions)
1032 commit_ids = set(self.pull_request.revisions)
1033 new_commit_ids = commit_ids - old_commit_ids
1033 new_commit_ids = commit_ids - old_commit_ids
1034 assert len(new_commit_ids) == 1
1034 assert len(new_commit_ids) == 1
1035 return new_commit_ids.pop()
1035 return new_commit_ids.pop()
1036
1036
1037 def remove_one_commit(self):
1037 def remove_one_commit(self):
1038 assert len(self.pull_request.revisions) == 2
1038 assert len(self.pull_request.revisions) == 2
1039 source_vcs = self.source_repository.scm_instance()
1039 source_vcs = self.source_repository.scm_instance()
1040 removed_commit_id = source_vcs.commit_ids[-1]
1040 removed_commit_id = source_vcs.commit_ids[-1]
1041
1041
1042 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1042 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1043 # remove the if once that's sorted out.
1043 # remove the if once that's sorted out.
1044 if self.backend.alias == "git":
1044 if self.backend.alias == "git":
1045 kwargs = {'branch_name': self.backend.default_branch_name}
1045 kwargs = {'branch_name': self.backend.default_branch_name}
1046 else:
1046 else:
1047 kwargs = {}
1047 kwargs = {}
1048 source_vcs.strip(removed_commit_id, **kwargs)
1048 source_vcs.strip(removed_commit_id, **kwargs)
1049
1049
1050 PullRequestModel().update_commits(self.pull_request)
1050 PullRequestModel().update_commits(self.pull_request)
1051 assert len(self.pull_request.revisions) == 1
1051 assert len(self.pull_request.revisions) == 1
1052 return removed_commit_id
1052 return removed_commit_id
1053
1053
1054 def create_comment(self, linked_to=None):
1054 def create_comment(self, linked_to=None):
1055 comment = CommentsModel().create(
1055 comment = CommentsModel().create(
1056 text=u"Test comment",
1056 text=u"Test comment",
1057 repo=self.target_repository.repo_name,
1057 repo=self.target_repository.repo_name,
1058 user=self.author,
1058 user=self.author,
1059 pull_request=self.pull_request)
1059 pull_request=self.pull_request)
1060 assert comment.pull_request_version_id is None
1060 assert comment.pull_request_version_id is None
1061
1061
1062 if linked_to:
1062 if linked_to:
1063 PullRequestModel()._link_comments_to_version(linked_to)
1063 PullRequestModel()._link_comments_to_version(linked_to)
1064
1064
1065 return comment
1065 return comment
1066
1066
1067 def create_inline_comment(
1067 def create_inline_comment(
1068 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1068 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1069 comment = CommentsModel().create(
1069 comment = CommentsModel().create(
1070 text=u"Test comment",
1070 text=u"Test comment",
1071 repo=self.target_repository.repo_name,
1071 repo=self.target_repository.repo_name,
1072 user=self.author,
1072 user=self.author,
1073 line_no=line_no,
1073 line_no=line_no,
1074 f_path=file_path,
1074 f_path=file_path,
1075 pull_request=self.pull_request)
1075 pull_request=self.pull_request)
1076 assert comment.pull_request_version_id is None
1076 assert comment.pull_request_version_id is None
1077
1077
1078 if linked_to:
1078 if linked_to:
1079 PullRequestModel()._link_comments_to_version(linked_to)
1079 PullRequestModel()._link_comments_to_version(linked_to)
1080
1080
1081 return comment
1081 return comment
1082
1082
1083 def create_version_of_pull_request(self):
1083 def create_version_of_pull_request(self):
1084 pull_request = self.create_pull_request()
1084 pull_request = self.create_pull_request()
1085 version = PullRequestModel()._create_version_from_snapshot(
1085 version = PullRequestModel()._create_version_from_snapshot(
1086 pull_request)
1086 pull_request)
1087 return version
1087 return version
1088
1088
1089 def create_status_votes(self, status, *reviewers):
1089 def create_status_votes(self, status, *reviewers):
1090 for reviewer in reviewers:
1090 for reviewer in reviewers:
1091 ChangesetStatusModel().set_status(
1091 ChangesetStatusModel().set_status(
1092 repo=self.pull_request.target_repo,
1092 repo=self.pull_request.target_repo,
1093 status=status,
1093 status=status,
1094 user=reviewer.user_id,
1094 user=reviewer.user_id,
1095 pull_request=self.pull_request)
1095 pull_request=self.pull_request)
1096
1096
1097 def set_mergeable(self, value):
1097 def set_mergeable(self, value):
1098 if not self.mergeable_patcher:
1098 if not self.mergeable_patcher:
1099 self.mergeable_patcher = mock.patch.object(
1099 self.mergeable_patcher = mock.patch.object(
1100 VcsSettingsModel, 'get_general_settings')
1100 VcsSettingsModel, 'get_general_settings')
1101 self.mergeable_mock = self.mergeable_patcher.start()
1101 self.mergeable_mock = self.mergeable_patcher.start()
1102 self.mergeable_mock.return_value = {
1102 self.mergeable_mock.return_value = {
1103 'rhodecode_pr_merge_enabled': value}
1103 'rhodecode_pr_merge_enabled': value}
1104
1104
1105 def cleanup(self):
1105 def cleanup(self):
1106 # In case the source repository is already cleaned up, the pull
1106 # In case the source repository is already cleaned up, the pull
1107 # request will already be deleted.
1107 # request will already be deleted.
1108 pull_request = PullRequest().get(self.pull_request_id)
1108 pull_request = PullRequest().get(self.pull_request_id)
1109 if pull_request:
1109 if pull_request:
1110 PullRequestModel().delete(pull_request, pull_request.author)
1110 PullRequestModel().delete(pull_request, pull_request.author)
1111 Session().commit()
1111 Session().commit()
1112
1112
1113 if self.notification_patcher:
1113 if self.notification_patcher:
1114 self.notification_patcher.stop()
1114 self.notification_patcher.stop()
1115
1115
1116 if self.mergeable_patcher:
1116 if self.mergeable_patcher:
1117 self.mergeable_patcher.stop()
1117 self.mergeable_patcher.stop()
1118
1118
1119
1119
1120 @pytest.fixture
1120 @pytest.fixture()
1121 def user_admin(baseapp):
1121 def user_admin(baseapp):
1122 """
1122 """
1123 Provides the default admin test user as an instance of `db.User`.
1123 Provides the default admin test user as an instance of `db.User`.
1124 """
1124 """
1125 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1125 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126 return user
1126 return user
1127
1127
1128
1128
1129 @pytest.fixture
1129 @pytest.fixture()
1130 def user_regular(baseapp):
1130 def user_regular(baseapp):
1131 """
1131 """
1132 Provides the default regular test user as an instance of `db.User`.
1132 Provides the default regular test user as an instance of `db.User`.
1133 """
1133 """
1134 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1134 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135 return user
1135 return user
1136
1136
1137
1137
1138 @pytest.fixture
1138 @pytest.fixture()
1139 def user_util(request, db_connection):
1139 def user_util(request, db_connection):
1140 """
1140 """
1141 Provides a wired instance of `UserUtility` with integrated cleanup.
1141 Provides a wired instance of `UserUtility` with integrated cleanup.
1142 """
1142 """
1143 utility = UserUtility(test_name=request.node.name)
1143 utility = UserUtility(test_name=request.node.name)
1144 request.addfinalizer(utility.cleanup)
1144 request.addfinalizer(utility.cleanup)
1145 return utility
1145 return utility
1146
1146
1147
1147
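Illustrative sketch only, not part of this changeset: combining the helpers on UserUtility below; the permission name is an example of RhodeCode's repository permission keys, and the created objects are cleaned up by the finalizer.

    def test_grants_read_permission(user_util):
        user = user_util.create_user()
        repo_group = user_util.create_repo_group()
        repo = user_util.create_repo(parent=repo_group)
        user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
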
1148 # TODO: johbo: Split this up into utilities per domain or something similar
1148 # TODO: johbo: Split this up into utilities per domain or something similar
1149 class UserUtility(object):
1149 class UserUtility(object):
1150
1150
1151 def __init__(self, test_name="test"):
1151 def __init__(self, test_name="test"):
1152 self._test_name = self._sanitize_name(test_name)
1152 self._test_name = self._sanitize_name(test_name)
1153 self.fixture = Fixture()
1153 self.fixture = Fixture()
1154 self.repo_group_ids = []
1154 self.repo_group_ids = []
1155 self.repos_ids = []
1155 self.repos_ids = []
1156 self.user_ids = []
1156 self.user_ids = []
1157 self.user_group_ids = []
1157 self.user_group_ids = []
1158 self.user_repo_permission_ids = []
1158 self.user_repo_permission_ids = []
1159 self.user_group_repo_permission_ids = []
1159 self.user_group_repo_permission_ids = []
1160 self.user_repo_group_permission_ids = []
1160 self.user_repo_group_permission_ids = []
1161 self.user_group_repo_group_permission_ids = []
1161 self.user_group_repo_group_permission_ids = []
1162 self.user_user_group_permission_ids = []
1162 self.user_user_group_permission_ids = []
1163 self.user_group_user_group_permission_ids = []
1163 self.user_group_user_group_permission_ids = []
1164 self.user_permissions = []
1164 self.user_permissions = []
1165
1165
1166 def _sanitize_name(self, name):
1166 def _sanitize_name(self, name):
1167 for char in ['[', ']']:
1167 for char in ['[', ']']:
1168 name = name.replace(char, '_')
1168 name = name.replace(char, '_')
1169 return name
1169 return name
1170
1170
1171 def create_repo_group(
1171 def create_repo_group(
1172 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1172 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1173 group_name = "{prefix}_repogroup_{count}".format(
1173 group_name = "{prefix}_repogroup_{count}".format(
1174 prefix=self._test_name,
1174 prefix=self._test_name,
1175 count=len(self.repo_group_ids))
1175 count=len(self.repo_group_ids))
1176 repo_group = self.fixture.create_repo_group(
1176 repo_group = self.fixture.create_repo_group(
1177 group_name, cur_user=owner)
1177 group_name, cur_user=owner)
1178 if auto_cleanup:
1178 if auto_cleanup:
1179 self.repo_group_ids.append(repo_group.group_id)
1179 self.repo_group_ids.append(repo_group.group_id)
1180 return repo_group
1180 return repo_group
1181
1181
1182 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1182 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1183 auto_cleanup=True, repo_type='hg', bare=False):
1183 auto_cleanup=True, repo_type='hg', bare=False):
1184 repo_name = "{prefix}_repository_{count}".format(
1184 repo_name = "{prefix}_repository_{count}".format(
1185 prefix=self._test_name,
1185 prefix=self._test_name,
1186 count=len(self.repos_ids))
1186 count=len(self.repos_ids))
1187
1187
1188 repository = self.fixture.create_repo(
1188 repository = self.fixture.create_repo(
1189 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1189 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1190 if auto_cleanup:
1190 if auto_cleanup:
1191 self.repos_ids.append(repository.repo_id)
1191 self.repos_ids.append(repository.repo_id)
1192 return repository
1192 return repository
1193
1193
1194 def create_user(self, auto_cleanup=True, **kwargs):
1194 def create_user(self, auto_cleanup=True, **kwargs):
1195 user_name = "{prefix}_user_{count}".format(
1195 user_name = "{prefix}_user_{count}".format(
1196 prefix=self._test_name,
1196 prefix=self._test_name,
1197 count=len(self.user_ids))
1197 count=len(self.user_ids))
1198 user = self.fixture.create_user(user_name, **kwargs)
1198 user = self.fixture.create_user(user_name, **kwargs)
1199 if auto_cleanup:
1199 if auto_cleanup:
1200 self.user_ids.append(user.user_id)
1200 self.user_ids.append(user.user_id)
1201 return user
1201 return user
1202
1202
1203 def create_additional_user_email(self, user, email):
1203 def create_additional_user_email(self, user, email):
1204 uem = self.fixture.create_additional_user_email(user=user, email=email)
1204 uem = self.fixture.create_additional_user_email(user=user, email=email)
1205 return uem
1205 return uem
1206
1206
1207 def create_user_with_group(self):
1207 def create_user_with_group(self):
1208 user = self.create_user()
1208 user = self.create_user()
1209 user_group = self.create_user_group(members=[user])
1209 user_group = self.create_user_group(members=[user])
1210 return user, user_group
1210 return user, user_group
1211
1211
1212 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1212 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1213 auto_cleanup=True, **kwargs):
1213 auto_cleanup=True, **kwargs):
1214 group_name = "{prefix}_usergroup_{count}".format(
1214 group_name = "{prefix}_usergroup_{count}".format(
1215 prefix=self._test_name,
1215 prefix=self._test_name,
1216 count=len(self.user_group_ids))
1216 count=len(self.user_group_ids))
1217 user_group = self.fixture.create_user_group(
1217 user_group = self.fixture.create_user_group(
1218 group_name, cur_user=owner, **kwargs)
1218 group_name, cur_user=owner, **kwargs)
1219
1219
1220 if auto_cleanup:
1220 if auto_cleanup:
1221 self.user_group_ids.append(user_group.users_group_id)
1221 self.user_group_ids.append(user_group.users_group_id)
1222 if members:
1222 if members:
1223 for user in members:
1223 for user in members:
1224 UserGroupModel().add_user_to_group(user_group, user)
1224 UserGroupModel().add_user_to_group(user_group, user)
1225 return user_group
1225 return user_group
1226
1226
1227 def grant_user_permission(self, user_name, permission_name):
1227 def grant_user_permission(self, user_name, permission_name):
1228 self.inherit_default_user_permissions(user_name, False)
1228 self.inherit_default_user_permissions(user_name, False)
1229 self.user_permissions.append((user_name, permission_name))
1229 self.user_permissions.append((user_name, permission_name))
1230
1230
1231 def grant_user_permission_to_repo_group(
1231 def grant_user_permission_to_repo_group(
1232 self, repo_group, user, permission_name):
1232 self, repo_group, user, permission_name):
1233 permission = RepoGroupModel().grant_user_permission(
1233 permission = RepoGroupModel().grant_user_permission(
1234 repo_group, user, permission_name)
1234 repo_group, user, permission_name)
1235 self.user_repo_group_permission_ids.append(
1235 self.user_repo_group_permission_ids.append(
1236 (repo_group.group_id, user.user_id))
1236 (repo_group.group_id, user.user_id))
1237 return permission
1237 return permission
1238
1238
1239 def grant_user_group_permission_to_repo_group(
1239 def grant_user_group_permission_to_repo_group(
1240 self, repo_group, user_group, permission_name):
1240 self, repo_group, user_group, permission_name):
1241 permission = RepoGroupModel().grant_user_group_permission(
1241 permission = RepoGroupModel().grant_user_group_permission(
1242 repo_group, user_group, permission_name)
1242 repo_group, user_group, permission_name)
1243 self.user_group_repo_group_permission_ids.append(
1243 self.user_group_repo_group_permission_ids.append(
1244 (repo_group.group_id, user_group.users_group_id))
1244 (repo_group.group_id, user_group.users_group_id))
1245 return permission
1245 return permission
1246
1246
1247 def grant_user_permission_to_repo(
1247 def grant_user_permission_to_repo(
1248 self, repo, user, permission_name):
1248 self, repo, user, permission_name):
1249 permission = RepoModel().grant_user_permission(
1249 permission = RepoModel().grant_user_permission(
1250 repo, user, permission_name)
1250 repo, user, permission_name)
1251 self.user_repo_permission_ids.append(
1251 self.user_repo_permission_ids.append(
1252 (repo.repo_id, user.user_id))
1252 (repo.repo_id, user.user_id))
1253 return permission
1253 return permission
1254
1254
1255 def grant_user_group_permission_to_repo(
1255 def grant_user_group_permission_to_repo(
1256 self, repo, user_group, permission_name):
1256 self, repo, user_group, permission_name):
1257 permission = RepoModel().grant_user_group_permission(
1257 permission = RepoModel().grant_user_group_permission(
1258 repo, user_group, permission_name)
1258 repo, user_group, permission_name)
1259 self.user_group_repo_permission_ids.append(
1259 self.user_group_repo_permission_ids.append(
1260 (repo.repo_id, user_group.users_group_id))
1260 (repo.repo_id, user_group.users_group_id))
1261 return permission
1261 return permission
1262
1262
1263 def grant_user_permission_to_user_group(
1263 def grant_user_permission_to_user_group(
1264 self, target_user_group, user, permission_name):
1264 self, target_user_group, user, permission_name):
1265 permission = UserGroupModel().grant_user_permission(
1265 permission = UserGroupModel().grant_user_permission(
1266 target_user_group, user, permission_name)
1266 target_user_group, user, permission_name)
1267 self.user_user_group_permission_ids.append(
1267 self.user_user_group_permission_ids.append(
1268 (target_user_group.users_group_id, user.user_id))
1268 (target_user_group.users_group_id, user.user_id))
1269 return permission
1269 return permission
1270
1270
1271 def grant_user_group_permission_to_user_group(
1271 def grant_user_group_permission_to_user_group(
1272 self, target_user_group, user_group, permission_name):
1272 self, target_user_group, user_group, permission_name):
1273 permission = UserGroupModel().grant_user_group_permission(
1273 permission = UserGroupModel().grant_user_group_permission(
1274 target_user_group, user_group, permission_name)
1274 target_user_group, user_group, permission_name)
1275 self.user_group_user_group_permission_ids.append(
1275 self.user_group_user_group_permission_ids.append(
1276 (target_user_group.users_group_id, user_group.users_group_id))
1276 (target_user_group.users_group_id, user_group.users_group_id))
1277 return permission
1277 return permission
1278
1278
1279 def revoke_user_permission(self, user_name, permission_name):
1279 def revoke_user_permission(self, user_name, permission_name):
1280 self.inherit_default_user_permissions(user_name, True)
1280 self.inherit_default_user_permissions(user_name, True)
1281 UserModel().revoke_perm(user_name, permission_name)
1281 UserModel().revoke_perm(user_name, permission_name)
1282
1282
1283 def inherit_default_user_permissions(self, user_name, value):
1283 def inherit_default_user_permissions(self, user_name, value):
1284 user = UserModel().get_by_username(user_name)
1284 user = UserModel().get_by_username(user_name)
1285 user.inherit_default_permissions = value
1285 user.inherit_default_permissions = value
1286 Session().add(user)
1286 Session().add(user)
1287 Session().commit()
1287 Session().commit()
1288
1288
1289 def cleanup(self):
1289 def cleanup(self):
1290 self._cleanup_permissions()
1290 self._cleanup_permissions()
1291 self._cleanup_repos()
1291 self._cleanup_repos()
1292 self._cleanup_repo_groups()
1292 self._cleanup_repo_groups()
1293 self._cleanup_user_groups()
1293 self._cleanup_user_groups()
1294 self._cleanup_users()
1294 self._cleanup_users()
1295
1295
1296 def _cleanup_permissions(self):
1296 def _cleanup_permissions(self):
1297 if self.user_permissions:
1297 if self.user_permissions:
1298 for user_name, permission_name in self.user_permissions:
1298 for user_name, permission_name in self.user_permissions:
1299 self.revoke_user_permission(user_name, permission_name)
1299 self.revoke_user_permission(user_name, permission_name)
1300
1300
1301 for permission in self.user_repo_permission_ids:
1301 for permission in self.user_repo_permission_ids:
1302 RepoModel().revoke_user_permission(*permission)
1302 RepoModel().revoke_user_permission(*permission)
1303
1303
1304 for permission in self.user_group_repo_permission_ids:
1304 for permission in self.user_group_repo_permission_ids:
1305 RepoModel().revoke_user_group_permission(*permission)
1305 RepoModel().revoke_user_group_permission(*permission)
1306
1306
1307 for permission in self.user_repo_group_permission_ids:
1307 for permission in self.user_repo_group_permission_ids:
1308 RepoGroupModel().revoke_user_permission(*permission)
1308 RepoGroupModel().revoke_user_permission(*permission)
1309
1309
1310 for permission in self.user_group_repo_group_permission_ids:
1310 for permission in self.user_group_repo_group_permission_ids:
1311 RepoGroupModel().revoke_user_group_permission(*permission)
1311 RepoGroupModel().revoke_user_group_permission(*permission)
1312
1312
1313 for permission in self.user_user_group_permission_ids:
1313 for permission in self.user_user_group_permission_ids:
1314 UserGroupModel().revoke_user_permission(*permission)
1314 UserGroupModel().revoke_user_permission(*permission)
1315
1315
1316 for permission in self.user_group_user_group_permission_ids:
1316 for permission in self.user_group_user_group_permission_ids:
1317 UserGroupModel().revoke_user_group_permission(*permission)
1317 UserGroupModel().revoke_user_group_permission(*permission)
1318
1318
1319 def _cleanup_repo_groups(self):
1319 def _cleanup_repo_groups(self):
1320 def _repo_group_compare(first_group_id, second_group_id):
1320 def _repo_group_compare(first_group_id, second_group_id):
1321 """
1321 """
1322 Gives higher priority to the groups with the most complex paths
1322 Gives higher priority to the groups with the most complex paths
1323 """
1323 """
1324 first_group = RepoGroup.get(first_group_id)
1324 first_group = RepoGroup.get(first_group_id)
1325 second_group = RepoGroup.get(second_group_id)
1325 second_group = RepoGroup.get(second_group_id)
1326 first_group_parts = (
1326 first_group_parts = (
1327 len(first_group.group_name.split('/')) if first_group else 0)
1327 len(first_group.group_name.split('/')) if first_group else 0)
1328 second_group_parts = (
1328 second_group_parts = (
1329 len(second_group.group_name.split('/')) if second_group else 0)
1329 len(second_group.group_name.split('/')) if second_group else 0)
1330 return cmp(second_group_parts, first_group_parts)
1330 return cmp(second_group_parts, first_group_parts)
1331
1331
1332 sorted_repo_group_ids = sorted(
1332 sorted_repo_group_ids = sorted(
1333 self.repo_group_ids, cmp=_repo_group_compare)
1333 self.repo_group_ids, cmp=_repo_group_compare)
1334 for repo_group_id in sorted_repo_group_ids:
1334 for repo_group_id in sorted_repo_group_ids:
1335 self.fixture.destroy_repo_group(repo_group_id)
1335 self.fixture.destroy_repo_group(repo_group_id)
1336
1336
1337 def _cleanup_repos(self):
1337 def _cleanup_repos(self):
1338 sorted_repos_ids = sorted(self.repos_ids)
1338 sorted_repos_ids = sorted(self.repos_ids)
1339 for repo_id in sorted_repos_ids:
1339 for repo_id in sorted_repos_ids:
1340 self.fixture.destroy_repo(repo_id)
1340 self.fixture.destroy_repo(repo_id)
1341
1341
1342 def _cleanup_user_groups(self):
1342 def _cleanup_user_groups(self):
1343 def _user_group_compare(first_group_id, second_group_id):
1343 def _user_group_compare(first_group_id, second_group_id):
1344 """
1344 """
1345 Gives higher priority to the groups with the most complex paths
1345 Gives higher priority to the groups with the most complex paths
1346 """
1346 """
1347 first_group = UserGroup.get(first_group_id)
1347 first_group = UserGroup.get(first_group_id)
1348 second_group = UserGroup.get(second_group_id)
1348 second_group = UserGroup.get(second_group_id)
1349 first_group_parts = (
1349 first_group_parts = (
1350 len(first_group.users_group_name.split('/'))
1350 len(first_group.users_group_name.split('/'))
1351 if first_group else 0)
1351 if first_group else 0)
1352 second_group_parts = (
1352 second_group_parts = (
1353 len(second_group.users_group_name.split('/'))
1353 len(second_group.users_group_name.split('/'))
1354 if second_group else 0)
1354 if second_group else 0)
1355 return cmp(second_group_parts, first_group_parts)
1355 return cmp(second_group_parts, first_group_parts)
1356
1356
1357 sorted_user_group_ids = sorted(
1357 sorted_user_group_ids = sorted(
1358 self.user_group_ids, cmp=_user_group_compare)
1358 self.user_group_ids, cmp=_user_group_compare)
1359 for user_group_id in sorted_user_group_ids:
1359 for user_group_id in sorted_user_group_ids:
1360 self.fixture.destroy_user_group(user_group_id)
1360 self.fixture.destroy_user_group(user_group_id)
1361
1361
1362 def _cleanup_users(self):
1362 def _cleanup_users(self):
1363 for user_id in self.user_ids:
1363 for user_id in self.user_ids:
1364 self.fixture.destroy_user(user_id)
1364 self.fixture.destroy_user(user_id)
1365
1365
1366
1366
1367 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 # TODO: Think about moving this into a pytest-pyro package and make it a
1368 # pytest plugin
1368 # pytest plugin
1369 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1369 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1370 def pytest_runtest_makereport(item, call):
1370 def pytest_runtest_makereport(item, call):
1371 """
1371 """
1372 Adds the remote traceback if the exception has this information.
1372 Adds the remote traceback if the exception has this information.
1373
1373
1374 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1374 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1375 to the exception instance.
1375 to the exception instance.
1376 """
1376 """
1377 outcome = yield
1377 outcome = yield
1378 report = outcome.get_result()
1378 report = outcome.get_result()
1379 if call.excinfo:
1379 if call.excinfo:
1380 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1380 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1381
1381
1382
1382
1383 def _add_vcsserver_remote_traceback(report, exc):
1383 def _add_vcsserver_remote_traceback(report, exc):
1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1385
1385
1386 if vcsserver_traceback:
1386 if vcsserver_traceback:
1387 section = 'VCSServer remote traceback ' + report.when
1387 section = 'VCSServer remote traceback ' + report.when
1388 report.sections.append((section, vcsserver_traceback))
1388 report.sections.append((section, vcsserver_traceback))
1389
1389
1390
1390
1391 @pytest.fixture(scope='session')
1391 @pytest.fixture(scope='session')
1392 def testrun():
1392 def testrun():
1393 return {
1393 return {
1394 'uuid': uuid.uuid4(),
1394 'uuid': uuid.uuid4(),
1395 'start': datetime.datetime.utcnow().isoformat(),
1395 'start': datetime.datetime.utcnow().isoformat(),
1396 'timestamp': int(time.time()),
1396 'timestamp': int(time.time()),
1397 }
1397 }
1398
1398
1399
1399
1400 class AppenlightClient(object):
1400 class AppenlightClient(object):
1401
1401
1402 url_template = '{url}?protocol_version=0.5'
1402 url_template = '{url}?protocol_version=0.5'
1403
1403
1404 def __init__(
1404 def __init__(
1405 self, url, api_key, add_server=True, add_timestamp=True,
1405 self, url, api_key, add_server=True, add_timestamp=True,
1406 namespace=None, request=None, testrun=None):
1406 namespace=None, request=None, testrun=None):
1407 self.url = self.url_template.format(url=url)
1407 self.url = self.url_template.format(url=url)
1408 self.api_key = api_key
1408 self.api_key = api_key
1409 self.add_server = add_server
1409 self.add_server = add_server
1410 self.add_timestamp = add_timestamp
1410 self.add_timestamp = add_timestamp
1411 self.namespace = namespace
1411 self.namespace = namespace
1412 self.request = request
1412 self.request = request
1413 self.server = socket.getfqdn(socket.gethostname())
1413 self.server = socket.getfqdn(socket.gethostname())
1414 self.tags_before = {}
1414 self.tags_before = {}
1415 self.tags_after = {}
1415 self.tags_after = {}
1416 self.stats = []
1416 self.stats = []
1417 self.testrun = testrun or {}
1417 self.testrun = testrun or {}
1418
1418
1419 def tag_before(self, tag, value):
1419 def tag_before(self, tag, value):
1420 self.tags_before[tag] = value
1420 self.tags_before[tag] = value
1421
1421
1422 def tag_after(self, tag, value):
1422 def tag_after(self, tag, value):
1423 self.tags_after[tag] = value
1423 self.tags_after[tag] = value
1424
1424
1425 def collect(self, data):
1425 def collect(self, data):
1426 if self.add_server:
1426 if self.add_server:
1427 data.setdefault('server', self.server)
1427 data.setdefault('server', self.server)
1428 if self.add_timestamp:
1428 if self.add_timestamp:
1429 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1429 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1430 if self.namespace:
1430 if self.namespace:
1431 data.setdefault('namespace', self.namespace)
1431 data.setdefault('namespace', self.namespace)
1432 if self.request:
1432 if self.request:
1433 data.setdefault('request', self.request)
1433 data.setdefault('request', self.request)
1434 self.stats.append(data)
1434 self.stats.append(data)
1435
1435
1436 def send_stats(self):
1436 def send_stats(self):
1437 tags = [
1437 tags = [
1438 ('testrun', self.request),
1438 ('testrun', self.request),
1439 ('testrun.start', self.testrun['start']),
1439 ('testrun.start', self.testrun['start']),
1440 ('testrun.timestamp', self.testrun['timestamp']),
1440 ('testrun.timestamp', self.testrun['timestamp']),
1441 ('test', self.namespace),
1441 ('test', self.namespace),
1442 ]
1442 ]
1443 for key, value in self.tags_before.items():
1443 for key, value in self.tags_before.items():
1444 tags.append((key + '.before', value))
1444 tags.append((key + '.before', value))
1445 try:
1445 try:
1446 delta = self.tags_after[key] - value
1446 delta = self.tags_after[key] - value
1447 tags.append((key + '.delta', delta))
1447 tags.append((key + '.delta', delta))
1448 except Exception:
1448 except Exception:
1449 pass
1449 pass
1450 for key, value in self.tags_after.items():
1450 for key, value in self.tags_after.items():
1451 tags.append((key + '.after', value))
1451 tags.append((key + '.after', value))
1452 self.collect({
1452 self.collect({
1453 'message': "Collected tags",
1453 'message': "Collected tags",
1454 'tags': tags,
1454 'tags': tags,
1455 })
1455 })
1456
1456
1457 response = requests.post(
1457 response = requests.post(
1458 self.url,
1458 self.url,
1459 headers={
1459 headers={
1460 'X-appenlight-api-key': self.api_key},
1460 'X-appenlight-api-key': self.api_key},
1461 json=self.stats,
1461 json=self.stats,
1462 )
1462 )
1463
1463
1464 if not response.status_code == 200:
1464 if not response.status_code == 200:
1465 pprint.pprint(self.stats)
1465 pprint.pprint(self.stats)
1466 print(response.headers)
1466 print(response.headers)
1467 print(response.text)
1467 print(response.text)
1468 raise Exception('Sending to appenlight failed')
1468 raise Exception('Sending to appenlight failed')
1469
1469
1470
1470
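Illustrative sketch only, not part of this changeset: collecting statistics with AppenlightClient. The endpoint URL and API key are placeholders; send_stats() would POST the collected entries to that endpoint.

    def test_collects_stats(testrun):
        client = AppenlightClient(
            url='https://appenlight.example.com/api/general',  # placeholder endpoint
            api_key='dummy-key', namespace='test_example', testrun=testrun)
        client.tag_before('commits', 10)
        client.tag_after('commits', 12)
        client.collect({'message': 'custom entry'})
        assert client.stats
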
1471 @pytest.fixture
1471 @pytest.fixture()
1472 def gist_util(request, db_connection):
1472 def gist_util(request, db_connection):
1473 """
1473 """
1474 Provides a wired instance of `GistUtility` with integrated cleanup.
1474 Provides a wired instance of `GistUtility` with integrated cleanup.
1475 """
1475 """
1476 utility = GistUtility()
1476 utility = GistUtility()
1477 request.addfinalizer(utility.cleanup)
1477 request.addfinalizer(utility.cleanup)
1478 return utility
1478 return utility
1479
1479
1480
1480
1481 class GistUtility(object):
1481 class GistUtility(object):
1482 def __init__(self):
1482 def __init__(self):
1483 self.fixture = Fixture()
1483 self.fixture = Fixture()
1484 self.gist_ids = []
1484 self.gist_ids = []
1485
1485
1486 def create_gist(self, **kwargs):
1486 def create_gist(self, **kwargs):
1487 gist = self.fixture.create_gist(**kwargs)
1487 gist = self.fixture.create_gist(**kwargs)
1488 self.gist_ids.append(gist.gist_id)
1488 self.gist_ids.append(gist.gist_id)
1489 return gist
1489 return gist
1490
1490
1491 def cleanup(self):
1491 def cleanup(self):
1492 for id_ in self.gist_ids:
1492 for id_ in self.gist_ids:
1493 self.fixture.destroy_gists(str(id_))
1493 self.fixture.destroy_gists(str(id_))
1494
1494
1495
1495
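Illustrative sketch only, not part of this changeset: using the gist_util fixture; the created gist is destroyed again by the registered cleanup.

    def test_creates_a_gist(gist_util):
        gist = gist_util.create_gist()
        assert gist.gist_id in gist_util.gist_ids
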
1496 @pytest.fixture
1496 @pytest.fixture()
1497 def enabled_backends(request):
1497 def enabled_backends(request):
1498 backends = request.config.option.backends
1498 backends = request.config.option.backends
1499 return backends[:]
1499 return backends[:]
1500
1500
1501
1501
1502 @pytest.fixture
1502 @pytest.fixture()
1503 def settings_util(request, db_connection):
1503 def settings_util(request, db_connection):
1504 """
1504 """
1505 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1505 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1506 """
1506 """
1507 utility = SettingsUtility()
1507 utility = SettingsUtility()
1508 request.addfinalizer(utility.cleanup)
1508 request.addfinalizer(utility.cleanup)
1509 return utility
1509 return utility
1510
1510
1511
1511
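Illustrative sketch only, not part of this changeset: using settings_util together with the repo_stub fixture referenced further below; the section, key and value are made-up examples.

    def test_adds_a_repo_ui_setting(settings_util, repo_stub):
        setting = settings_util.create_repo_rhodecode_ui(
            repo_stub, section='hooks', value='python:example_hook', key='example_key')
        assert setting.ui_section == 'hooks'
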
1512 class SettingsUtility(object):
1512 class SettingsUtility(object):
1513 def __init__(self):
1513 def __init__(self):
1514 self.rhodecode_ui_ids = []
1514 self.rhodecode_ui_ids = []
1515 self.rhodecode_setting_ids = []
1515 self.rhodecode_setting_ids = []
1516 self.repo_rhodecode_ui_ids = []
1516 self.repo_rhodecode_ui_ids = []
1517 self.repo_rhodecode_setting_ids = []
1517 self.repo_rhodecode_setting_ids = []
1518
1518
1519 def create_repo_rhodecode_ui(
1519 def create_repo_rhodecode_ui(
1520 self, repo, section, value, key=None, active=True, cleanup=True):
1520 self, repo, section, value, key=None, active=True, cleanup=True):
1521 key = key or hashlib.sha1(
1521 key = key or hashlib.sha1(
1522 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1522 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1523
1523
1524 setting = RepoRhodeCodeUi()
1524 setting = RepoRhodeCodeUi()
1525 setting.repository_id = repo.repo_id
1525 setting.repository_id = repo.repo_id
1526 setting.ui_section = section
1526 setting.ui_section = section
1527 setting.ui_value = value
1527 setting.ui_value = value
1528 setting.ui_key = key
1528 setting.ui_key = key
1529 setting.ui_active = active
1529 setting.ui_active = active
1530 Session().add(setting)
1530 Session().add(setting)
1531 Session().commit()
1531 Session().commit()
1532
1532
1533 if cleanup:
1533 if cleanup:
1534 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1534 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1535 return setting
1535 return setting
1536
1536
1537 def create_rhodecode_ui(
1537 def create_rhodecode_ui(
1538 self, section, value, key=None, active=True, cleanup=True):
1538 self, section, value, key=None, active=True, cleanup=True):
1539 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1539 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1540
1540
1541 setting = RhodeCodeUi()
1541 setting = RhodeCodeUi()
1542 setting.ui_section = section
1542 setting.ui_section = section
1543 setting.ui_value = value
1543 setting.ui_value = value
1544 setting.ui_key = key
1544 setting.ui_key = key
1545 setting.ui_active = active
1545 setting.ui_active = active
1546 Session().add(setting)
1546 Session().add(setting)
1547 Session().commit()
1547 Session().commit()
1548
1548
1549 if cleanup:
1549 if cleanup:
1550 self.rhodecode_ui_ids.append(setting.ui_id)
1550 self.rhodecode_ui_ids.append(setting.ui_id)
1551 return setting
1551 return setting
1552
1552
1553 def create_repo_rhodecode_setting(
1553 def create_repo_rhodecode_setting(
1554 self, repo, name, value, type_, cleanup=True):
1554 self, repo, name, value, type_, cleanup=True):
1555 setting = RepoRhodeCodeSetting(
1555 setting = RepoRhodeCodeSetting(
1556 repo.repo_id, key=name, val=value, type=type_)
1556 repo.repo_id, key=name, val=value, type=type_)
1557 Session().add(setting)
1557 Session().add(setting)
1558 Session().commit()
1558 Session().commit()
1559
1559
1560 if cleanup:
1560 if cleanup:
1561 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1561 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1562 return setting
1562 return setting
1563
1563
1564 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1564 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1565 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1565 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1566 Session().add(setting)
1566 Session().add(setting)
1567 Session().commit()
1567 Session().commit()
1568
1568
1569 if cleanup:
1569 if cleanup:
1570 self.rhodecode_setting_ids.append(setting.app_settings_id)
1570 self.rhodecode_setting_ids.append(setting.app_settings_id)
1571
1571
1572 return setting
1572 return setting
1573
1573
1574 def cleanup(self):
1574 def cleanup(self):
1575 for id_ in self.rhodecode_ui_ids:
1575 for id_ in self.rhodecode_ui_ids:
1576 setting = RhodeCodeUi.get(id_)
1576 setting = RhodeCodeUi.get(id_)
1577 Session().delete(setting)
1577 Session().delete(setting)
1578
1578
1579 for id_ in self.rhodecode_setting_ids:
1579 for id_ in self.rhodecode_setting_ids:
1580 setting = RhodeCodeSetting.get(id_)
1580 setting = RhodeCodeSetting.get(id_)
1581 Session().delete(setting)
1581 Session().delete(setting)
1582
1582
1583 for id_ in self.repo_rhodecode_ui_ids:
1583 for id_ in self.repo_rhodecode_ui_ids:
1584 setting = RepoRhodeCodeUi.get(id_)
1584 setting = RepoRhodeCodeUi.get(id_)
1585 Session().delete(setting)
1585 Session().delete(setting)
1586
1586
1587 for id_ in self.repo_rhodecode_setting_ids:
1587 for id_ in self.repo_rhodecode_setting_ids:
1588 setting = RepoRhodeCodeSetting.get(id_)
1588 setting = RepoRhodeCodeSetting.get(id_)
1589 Session().delete(setting)
1589 Session().delete(setting)
1590
1590
1591 Session().commit()
1591 Session().commit()
1592
1592
1593
1593
1594 @pytest.fixture
1594 @pytest.fixture()
1595 def no_notifications(request):
1595 def no_notifications(request):
1596 notification_patcher = mock.patch(
1596 notification_patcher = mock.patch(
1597 'rhodecode.model.notification.NotificationModel.create')
1597 'rhodecode.model.notification.NotificationModel.create')
1598 notification_patcher.start()
1598 notification_patcher.start()
1599 request.addfinalizer(notification_patcher.stop)
1599 request.addfinalizer(notification_patcher.stop)
1600
1600
1601
1601
1602 @pytest.fixture(scope='session')
1602 @pytest.fixture(scope='session')
1603 def repeat(request):
1603 def repeat(request):
1604 """
1604 """
1605 The number of repetitions is based on this fixture.
1605 The number of repetitions is based on this fixture.
1606
1606
1607 Slower calls may divide it by 10 or 100. It is chosen so that the tests
1607 Slower calls may divide it by 10 or 100. It is chosen so that the tests
1608 are not too slow in our default test suite.
1608 are not too slow in our default test suite.
1609 """
1609 """
1610 return request.config.getoption('--repeat')
1610 return request.config.getoption('--repeat')
1611
1611
1612
1612
1613 @pytest.fixture
1613 @pytest.fixture()
1614 def rhodecode_fixtures():
1614 def rhodecode_fixtures():
1615 return Fixture()
1615 return Fixture()
1616
1616
1617
1617
1618 @pytest.fixture
1618 @pytest.fixture()
1619 def context_stub():
1619 def context_stub():
1620 """
1620 """
1621 Stub context object.
1621 Stub context object.
1622 """
1622 """
1623 context = pyramid.testing.DummyResource()
1623 context = pyramid.testing.DummyResource()
1624 return context
1624 return context
1625
1625
1626
1626
1627 @pytest.fixture
1627 @pytest.fixture()
1628 def request_stub():
1628 def request_stub():
1629 """
1629 """
1630 Stub request object.
1630 Stub request object.
1631 """
1631 """
1632 from rhodecode.lib.base import bootstrap_request
1632 from rhodecode.lib.base import bootstrap_request
1633 request = bootstrap_request(scheme='https')
1633 request = bootstrap_request(scheme='https')
1634 return request
1634 return request
1635
1635
1636
1636
1637 @pytest.fixture
1637 @pytest.fixture()
1638 def config_stub(request, request_stub):
1638 def config_stub(request, request_stub):
1639 """
1639 """
1640 Set up pyramid.testing and return the Configurator.
1640 Set up pyramid.testing and return the Configurator.
1641 """
1641 """
1642 from rhodecode.lib.base import bootstrap_config
1642 from rhodecode.lib.base import bootstrap_config
1643 config = bootstrap_config(request=request_stub)
1643 config = bootstrap_config(request=request_stub)
1644
1644
1645 @request.addfinalizer
1645 @request.addfinalizer
1646 def cleanup():
1646 def cleanup():
1647 pyramid.testing.tearDown()
1647 pyramid.testing.tearDown()
1648
1648
1649 return config
1649 return config
1650
1650
1651
1651
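Illustrative sketch only, not part of this changeset: exercising the pyramid testing stubs above; the added route is hypothetical.

    def test_uses_pyramid_testing_setup(config_stub, request_stub):
        assert request_stub.scheme == 'https'
        config_stub.add_route('example_route', '/example')  # hypothetical route
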
1652 @pytest.fixture
1652 @pytest.fixture()
1653 def StubIntegrationType():
1653 def StubIntegrationType():
1654 class _StubIntegrationType(IntegrationTypeBase):
1654 class _StubIntegrationType(IntegrationTypeBase):
1655 """ Test integration type class """
1655 """ Test integration type class """
1656
1656
1657 key = 'test'
1657 key = 'test'
1658 display_name = 'Test integration type'
1658 display_name = 'Test integration type'
1659 description = 'A test integration type for testing'
1659 description = 'A test integration type for testing'
1660
1660
1661 @classmethod
1661 @classmethod
1662 def icon(cls):
1662 def icon(cls):
1663 return 'test_icon_html_image'
1663 return 'test_icon_html_image'
1664
1664
1665 def __init__(self, settings):
1665 def __init__(self, settings):
1666 super(_StubIntegrationType, self).__init__(settings)
1666 super(_StubIntegrationType, self).__init__(settings)
1667 self.sent_events = [] # for testing
1667 self.sent_events = [] # for testing
1668
1668
1669 def send_event(self, event):
1669 def send_event(self, event):
1670 self.sent_events.append(event)
1670 self.sent_events.append(event)
1671
1671
1672 def settings_schema(self):
1672 def settings_schema(self):
1673 class SettingsSchema(colander.Schema):
1673 class SettingsSchema(colander.Schema):
1674 test_string_field = colander.SchemaNode(
1674 test_string_field = colander.SchemaNode(
1675 colander.String(),
1675 colander.String(),
1676 missing=colander.required,
1676 missing=colander.required,
1677 title='test string field',
1677 title='test string field',
1678 )
1678 )
1679 test_int_field = colander.SchemaNode(
1679 test_int_field = colander.SchemaNode(
1680 colander.Int(),
1680 colander.Int(),
1681 title='some integer setting',
1681 title='some integer setting',
1682 )
1682 )
1683 return SettingsSchema()
1683 return SettingsSchema()
1684
1684
1685
1685
1686 integration_type_registry.register_integration_type(_StubIntegrationType)
1686 integration_type_registry.register_integration_type(_StubIntegrationType)
1687 return _StubIntegrationType
1687 return _StubIntegrationType
1688
1688
1689 @pytest.fixture
1689 @pytest.fixture()
1690 def stub_integration_settings():
1690 def stub_integration_settings():
1691 return {
1691 return {
1692 'test_string_field': 'some data',
1692 'test_string_field': 'some data',
1693 'test_int_field': 100,
1693 'test_int_field': 100,
1694 }
1694 }
1695
1695
1696
1696
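Illustrative sketch only, not part of this changeset: how the two fixtures above combine in a test; the event payload is arbitrary.

    def test_stub_integration_records_events(StubIntegrationType, stub_integration_settings):
        integration = StubIntegrationType(stub_integration_settings)
        integration.send_event('example-event')
        assert integration.sent_events == ['example-event']
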
1697 @pytest.fixture
1697 @pytest.fixture()
1698 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1698 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1699 stub_integration_settings):
1699 stub_integration_settings):
1700 integration = IntegrationModel().create(
1700 integration = IntegrationModel().create(
1701 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1701 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1702 name='test repo integration',
1702 name='test repo integration',
1703 repo=repo_stub, repo_group=None, child_repos_only=None)
1703 repo=repo_stub, repo_group=None, child_repos_only=None)
1704
1704
1705 @request.addfinalizer
1705 @request.addfinalizer
1706 def cleanup():
1706 def cleanup():
1707 IntegrationModel().delete(integration)
1707 IntegrationModel().delete(integration)
1708
1708
1709 return integration
1709 return integration
1710
1710
1711
1711
1712 @pytest.fixture
1712 @pytest.fixture()
1713 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1713 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1714 stub_integration_settings):
1714 stub_integration_settings):
1715 integration = IntegrationModel().create(
1715 integration = IntegrationModel().create(
1716 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1716 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1717 name='test repogroup integration',
1717 name='test repogroup integration',
1718 repo=None, repo_group=test_repo_group, child_repos_only=True)
1718 repo=None, repo_group=test_repo_group, child_repos_only=True)
1719
1719
1720 @request.addfinalizer
1720 @request.addfinalizer
1721 def cleanup():
1721 def cleanup():
1722 IntegrationModel().delete(integration)
1722 IntegrationModel().delete(integration)
1723
1723
1724 return integration
1724 return integration
1725
1725
1726
1726
1727 @pytest.fixture
1727 @pytest.fixture()
1728 def repogroup_recursive_integration_stub(request, test_repo_group,
1728 def repogroup_recursive_integration_stub(request, test_repo_group,
1729 StubIntegrationType, stub_integration_settings):
1729 StubIntegrationType, stub_integration_settings):
1730 integration = IntegrationModel().create(
1730 integration = IntegrationModel().create(
1731 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1731 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1732 name='test recursive repogroup integration',
1732 name='test recursive repogroup integration',
1733 repo=None, repo_group=test_repo_group, child_repos_only=False)
1733 repo=None, repo_group=test_repo_group, child_repos_only=False)
1734
1734
1735 @request.addfinalizer
1735 @request.addfinalizer
1736 def cleanup():
1736 def cleanup():
1737 IntegrationModel().delete(integration)
1737 IntegrationModel().delete(integration)
1738
1738
1739 return integration
1739 return integration
1740
1740
1741
1741
1742 @pytest.fixture
1742 @pytest.fixture()
1743 def global_integration_stub(request, StubIntegrationType,
1743 def global_integration_stub(request, StubIntegrationType,
1744 stub_integration_settings):
1744 stub_integration_settings):
1745 integration = IntegrationModel().create(
1745 integration = IntegrationModel().create(
1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1747 name='test global integration',
1747 name='test global integration',
1748 repo=None, repo_group=None, child_repos_only=None)
1748 repo=None, repo_group=None, child_repos_only=None)
1749
1749
1750 @request.addfinalizer
1750 @request.addfinalizer
1751 def cleanup():
1751 def cleanup():
1752 IntegrationModel().delete(integration)
1752 IntegrationModel().delete(integration)
1753
1753
1754 return integration
1754 return integration
1755
1755
1756
1756
1757 @pytest.fixture
1757 @pytest.fixture()
1758 def root_repos_integration_stub(request, StubIntegrationType,
1758 def root_repos_integration_stub(request, StubIntegrationType,
1759 stub_integration_settings):
1759 stub_integration_settings):
1760 integration = IntegrationModel().create(
1760 integration = IntegrationModel().create(
1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1762 name='test global integration',
1762 name='test global integration',
1763 repo=None, repo_group=None, child_repos_only=True)
1763 repo=None, repo_group=None, child_repos_only=True)
1764
1764
1765 @request.addfinalizer
1765 @request.addfinalizer
1766 def cleanup():
1766 def cleanup():
1767 IntegrationModel().delete(integration)
1767 IntegrationModel().delete(integration)
1768
1768
1769 return integration
1769 return integration
1770
1770
1771
1771
1772 @pytest.fixture
1772 @pytest.fixture()
1773 def local_dt_to_utc():
1773 def local_dt_to_utc():
1774 def _factory(dt):
1774 def _factory(dt):
1775 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1775 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1776 dateutil.tz.tzutc()).replace(tzinfo=None)
1776 dateutil.tz.tzutc()).replace(tzinfo=None)
1777 return _factory
1777 return _factory
1778
1778
1779
1779
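A brief usage sketch (an editor's illustration, not part of this changeset) of the converter factory above; it assumes ``datetime`` is imported in this conftest module.

def test_local_dt_to_utc_returns_naive_utc(local_dt_to_utc):
    naive_utc = local_dt_to_utc(datetime.datetime(2019, 1, 1, 12, 0))
    # The factory strips tzinfo again, so the result is a naive datetime in UTC.
    assert naive_utc.tzinfo is None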
1780 @pytest.fixture
1780 @pytest.fixture()
1781 def disable_anonymous_user(request, baseapp):
1781 def disable_anonymous_user(request, baseapp):
1782 set_anonymous_access(False)
1782 set_anonymous_access(False)
1783
1783
1784 @request.addfinalizer
1784 @request.addfinalizer
1785 def cleanup():
1785 def cleanup():
1786 set_anonymous_access(True)
1786 set_anonymous_access(True)
1787
1787
1788
1788
1789 @pytest.fixture(scope='module')
1789 @pytest.fixture(scope='module')
1790 def rc_fixture(request):
1790 def rc_fixture(request):
1791 return Fixture()
1791 return Fixture()
1792
1792
1793
1793
1794 @pytest.fixture
1794 @pytest.fixture()
1795 def repo_groups(request):
1795 def repo_groups(request):
1796 fixture = Fixture()
1796 fixture = Fixture()
1797
1797
1798 session = Session()
1798 session = Session()
1799 zombie_group = fixture.create_repo_group('zombie')
1799 zombie_group = fixture.create_repo_group('zombie')
1800 parent_group = fixture.create_repo_group('parent')
1800 parent_group = fixture.create_repo_group('parent')
1801 child_group = fixture.create_repo_group('parent/child')
1801 child_group = fixture.create_repo_group('parent/child')
1802 groups_in_db = session.query(RepoGroup).all()
1802 groups_in_db = session.query(RepoGroup).all()
1803 assert len(groups_in_db) == 3
1803 assert len(groups_in_db) == 3
1804 assert child_group.group_parent_id == parent_group.group_id
1804 assert child_group.group_parent_id == parent_group.group_id
1805
1805
1806 @request.addfinalizer
1806 @request.addfinalizer
1807 def cleanup():
1807 def cleanup():
1808 fixture.destroy_repo_group(zombie_group)
1808 fixture.destroy_repo_group(zombie_group)
1809 fixture.destroy_repo_group(child_group)
1809 fixture.destroy_repo_group(child_group)
1810 fixture.destroy_repo_group(parent_group)
1810 fixture.destroy_repo_group(parent_group)
1811
1811
1812 return zombie_group, parent_group, child_group
1812 return zombie_group, parent_group, child_group
1813
1813
1814
1814
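An illustrative test (an assumption, not part of this changeset) that unpacks the three groups created by the ``repo_groups`` fixture.

def test_repo_groups_nesting(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    # Mirrors the sanity check the fixture performs itself before yielding.
    assert child_group.group_parent_id == parent_group.group_id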
1815 @pytest.fixture(scope="session")
1815 @pytest.fixture(scope="session")
1816 def tmp_path_factory(request):
1816 def tmp_path_factory(request):
1817 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1817 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1818 """
1818 """
1819
1819
1820 class TempPathFactory:
1820 class TempPathFactory:
1821
1821
1822 def mktemp(self, basename):
1822 def mktemp(self, basename):
1823 import tempfile
1823 import tempfile
1824 return tempfile.mktemp(basename)
1824 return tempfile.mktemp(basename)
1825
1825
1826 return TempPathFactory()
1826 return TempPathFactory()
@@ -1,293 +1,293 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import platform
22 import platform
23 import socket
23 import socket
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.pyramid_utils import get_app_config
27 from rhodecode.lib.pyramid_utils import get_app_config
28 from rhodecode.tests.fixture import TestINI
28 from rhodecode.tests.fixture import TestINI
29 from rhodecode.tests.server_utils import RcVCSServer
29 from rhodecode.tests.server_utils import RcVCSServer
30
30
31
31
32 def _parse_json(value):
32 def _parse_json(value):
33 return json.loads(value) if value else None
33 return json.loads(value) if value else None
34
34
35
35
36 def pytest_addoption(parser):
36 def pytest_addoption(parser):
37 parser.addoption(
37 parser.addoption(
38 '--test-loglevel', dest='test_loglevel',
38 '--test-loglevel', dest='test_loglevel',
39 help="Set default Logging level for tests, warn (default), info, debug")
39 help="Set default Logging level for tests, warn (default), info, debug")
40 group = parser.getgroup('pylons')
40 group = parser.getgroup('pylons')
41 group.addoption(
41 group.addoption(
42 '--with-pylons', dest='pyramid_config',
42 '--with-pylons', dest='pyramid_config',
43 help="Set up a Pylons environment with the specified config file.")
43 help="Set up a Pylons environment with the specified config file.")
44 group.addoption(
44 group.addoption(
45 '--ini-config-override', action='store', type=_parse_json,
45 '--ini-config-override', action='store', type=_parse_json,
46 default=None, dest='pyramid_config_override', help=(
46 default=None, dest='pyramid_config_override', help=(
47 "Overrides the .ini file settings. Should be specified in JSON"
47 "Overrides the .ini file settings. Should be specified in JSON"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
49 )
49 )
50 )
50 )
51 parser.addini(
51 parser.addini(
52 'pyramid_config',
52 'pyramid_config',
53 "Set up a Pyramid environment with the specified config file.")
53 "Set up a Pyramid environment with the specified config file.")
54
54
55 vcsgroup = parser.getgroup('vcs')
55 vcsgroup = parser.getgroup('vcs')
56 vcsgroup.addoption(
56 vcsgroup.addoption(
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
58 help="Do not start the VCSServer in a background process.")
58 help="Do not start the VCSServer in a background process.")
59 vcsgroup.addoption(
59 vcsgroup.addoption(
60 '--with-vcsserver-http', dest='vcsserver_config_http',
60 '--with-vcsserver-http', dest='vcsserver_config_http',
61 help="Start the HTTP VCSServer with the specified config file.")
61 help="Start the HTTP VCSServer with the specified config file.")
62 vcsgroup.addoption(
62 vcsgroup.addoption(
63 '--vcsserver-protocol', dest='vcsserver_protocol',
63 '--vcsserver-protocol', dest='vcsserver_protocol',
64 help="Start the VCSServer with HTTP protocol support.")
64 help="Start the VCSServer with HTTP protocol support.")
65 vcsgroup.addoption(
65 vcsgroup.addoption(
66 '--vcsserver-config-override', action='store', type=_parse_json,
66 '--vcsserver-config-override', action='store', type=_parse_json,
67 default=None, dest='vcsserver_config_override', help=(
67 default=None, dest='vcsserver_config_override', help=(
68 "Overrides the .ini file settings for the VCSServer. "
68 "Overrides the .ini file settings for the VCSServer. "
69 "Should be specified in JSON "
69 "Should be specified in JSON "
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
71 )
71 )
72 )
72 )
73 vcsgroup.addoption(
73 vcsgroup.addoption(
74 '--vcsserver-port', action='store', type=int,
74 '--vcsserver-port', action='store', type=int,
75 default=None, help=(
75 default=None, help=(
76 "Allows to set the port of the vcsserver. Useful when testing "
76 "Allows to set the port of the vcsserver. Useful when testing "
77 "against an already running server and random ports cause "
77 "against an already running server and random ports cause "
78 "trouble."))
78 "trouble."))
79 parser.addini(
79 parser.addini(
80 'vcsserver_config_http',
80 'vcsserver_config_http',
81 "Start the HTTP VCSServer with the specified config file.")
81 "Start the HTTP VCSServer with the specified config file.")
82 parser.addini(
82 parser.addini(
83 'vcsserver_protocol',
83 'vcsserver_protocol',
84 "Start the VCSServer with HTTP protocol support.")
84 "Start the VCSServer with HTTP protocol support.")
85
85
86
86
87 @pytest.fixture(scope='session')
87 @pytest.fixture(scope='session')
88 def vcsserver(request, vcsserver_port, vcsserver_factory):
88 def vcsserver(request, vcsserver_port, vcsserver_factory):
89 """
89 """
90 Session scope VCSServer.
90 Session scope VCSServer.
91
91
92 Tests which need the VCSServer have to rely on this fixture in order
92 Tests which need the VCSServer have to rely on this fixture in order
93 to ensure it will be running.
93 to ensure it will be running.
94
94
95 For specific needs, the fixture vcsserver_factory can be used. It allows
95 For specific needs, the fixture vcsserver_factory can be used. It allows
96 adjusting the configuration file for the test run.
96 adjusting the configuration file for the test run.
97
97
98 Command line args:
98 Command line args:
99
99
100 --without-vcsserver: Allows switching this fixture off. You then have to
100 --without-vcsserver: Allows switching this fixture off. You then have to
101 start the server manually.
101 start the server manually.
102
102
103 --vcsserver-port: Will expect the VCSServer to listen on this port.
103 --vcsserver-port: Will expect the VCSServer to listen on this port.
104 """
104 """
105
105
106 if not request.config.getoption('with_vcsserver'):
106 if not request.config.getoption('with_vcsserver'):
107 return None
107 return None
108
108
109 return vcsserver_factory(
109 return vcsserver_factory(
110 request, vcsserver_port=vcsserver_port)
110 request, vcsserver_port=vcsserver_port)
111
111
112
112
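A minimal usage sketch (an editor's illustration, not part of this changeset) of a test relying on the session-scoped ``vcsserver`` fixture; the test name and assertion are assumptions.

def test_vcsserver_is_available(vcsserver):
    # With --without-vcsserver the fixture returns None and the server is
    # expected to be managed outside of the test run.
    if vcsserver is None:
        pytest.skip('VCSServer is started manually for this run')
    # Otherwise the fixture hands back the running RcVCSServer instance.
    assert vcsserver is not None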
113 @pytest.fixture(scope='session')
113 @pytest.fixture(scope='session')
114 def vcsserver_factory(tmpdir_factory):
114 def vcsserver_factory(tmpdir_factory):
115 """
115 """
116 Use this if you need a running vcsserver with a special configuration.
116 Use this if you need a running vcsserver with a special configuration.
117 """
117 """
118
118
119 def factory(request, overrides=(), vcsserver_port=None,
119 def factory(request, overrides=(), vcsserver_port=None,
120 log_file=None):
120 log_file=None):
121
121
122 if vcsserver_port is None:
122 if vcsserver_port is None:
123 vcsserver_port = get_available_port()
123 vcsserver_port = get_available_port()
124
124
125 overrides = list(overrides)
125 overrides = list(overrides)
126 overrides.append({'server:main': {'port': vcsserver_port}})
126 overrides.append({'server:main': {'port': vcsserver_port}})
127
127
128 option_name = 'vcsserver_config_http'
128 option_name = 'vcsserver_config_http'
129 override_option_name = 'vcsserver_config_override'
129 override_option_name = 'vcsserver_config_override'
130 config_file = get_config(
130 config_file = get_config(
131 request.config, option_name=option_name,
131 request.config, option_name=option_name,
132 override_option_name=override_option_name, overrides=overrides,
132 override_option_name=override_option_name, overrides=overrides,
133 basetemp=tmpdir_factory.getbasetemp().strpath,
133 basetemp=tmpdir_factory.getbasetemp().strpath,
134 prefix='test_vcs_')
134 prefix='test_vcs_')
135
135
136 server = RcVCSServer(config_file, log_file)
136 server = RcVCSServer(config_file, log_file)
137 server.start()
137 server.start()
138
138
139 @request.addfinalizer
139 @request.addfinalizer
140 def cleanup():
140 def cleanup():
141 server.shutdown()
141 server.shutdown()
142
142
143 server.wait_until_ready()
143 server.wait_until_ready()
144 return server
144 return server
145
145
146 return factory
146 return factory
147
147
148
148
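An illustrative sketch of the factory described above; the overrides follow the same section/key dict shape the factory itself appends, but the specific setting shown is a hypothetical placeholder.

def test_against_custom_vcsserver(request, vcsserver_factory):
    # Hypothetical override: any {'section': {'key': 'value'}} dict is accepted.
    server = vcsserver_factory(
        request, overrides=[{'server:main': {'some_setting': 'some_value'}}])
    # The factory starts the server, registers a finalizer that shuts it
    # down, and waits until it is ready before returning it.
    assert server is not None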
149 def is_cygwin():
149 def is_cygwin():
150 return 'cygwin' in platform.system().lower()
150 return 'cygwin' in platform.system().lower()
151
151
152
152
153 def _use_log_level(config):
153 def _use_log_level(config):
154 level = config.getoption('test_loglevel') or 'warn'
154 level = config.getoption('test_loglevel') or 'warn'
155 return level.upper()
155 return level.upper()
156
156
157
157
158 @pytest.fixture(scope='session')
158 @pytest.fixture(scope='session')
159 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
159 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
160 option_name = 'pyramid_config'
160 option_name = 'pyramid_config'
161 log_level = _use_log_level(request.config)
161 log_level = _use_log_level(request.config)
162
162
163 overrides = [
163 overrides = [
164 {'server:main': {'port': rcserver_port}},
164 {'server:main': {'port': rcserver_port}},
165 {'app:main': {
165 {'app:main': {
166 'vcs.server': 'localhost:%s' % vcsserver_port,
166 'vcs.server': 'localhost:%s' % vcsserver_port,
167 # johbo: We will always start the VCSServer on our own based on the
167 # johbo: We will always start the VCSServer on our own based on the
168 # fixtures of the test cases. For the test run it must always be
168 # fixtures of the test cases. For the test run it must always be
169 # off in the INI file.
169 # off in the INI file.
170 'vcs.start_server': 'false',
170 'vcs.start_server': 'false',
171
171
172 'vcs.server.protocol': 'http',
172 'vcs.server.protocol': 'http',
173 'vcs.scm_app_implementation': 'http',
173 'vcs.scm_app_implementation': 'http',
174 'vcs.hooks.protocol': 'http',
174 'vcs.hooks.protocol': 'http',
175 'vcs.hooks.host': '127.0.0.1',
175 'vcs.hooks.host': '127.0.0.1',
176 }},
176 }},
177
177
178 {'handler_console': {
178 {'handler_console': {
179 'class ': 'StreamHandler',
179 'class ': 'StreamHandler',
180 'args ': '(sys.stderr,)',
180 'args ': '(sys.stderr,)',
181 'level': log_level,
181 'level': log_level,
182 }},
182 }},
183
183
184 ]
184 ]
185
185
186 filename = get_config(
186 filename = get_config(
187 request.config, option_name=option_name,
187 request.config, option_name=option_name,
188 override_option_name='{}_override'.format(option_name),
188 override_option_name='{}_override'.format(option_name),
189 overrides=overrides,
189 overrides=overrides,
190 basetemp=tmpdir_factory.getbasetemp().strpath,
190 basetemp=tmpdir_factory.getbasetemp().strpath,
191 prefix='test_rce_')
191 prefix='test_rce_')
192 return filename
192 return filename
193
193
194
194
195 @pytest.fixture(scope='session')
195 @pytest.fixture(scope='session')
196 def ini_settings(ini_config):
196 def ini_settings(ini_config):
197 ini_path = ini_config
197 ini_path = ini_config
198 return get_app_config(ini_path)
198 return get_app_config(ini_path)
199
199
200
200
201 def get_available_port():
201 def get_available_port():
202 family = socket.AF_INET
202 family = socket.AF_INET
203 socktype = socket.SOCK_STREAM
203 socktype = socket.SOCK_STREAM
204 host = '127.0.0.1'
204 host = '127.0.0.1'
205
205
206 mysocket = socket.socket(family, socktype)
206 mysocket = socket.socket(family, socktype)
207 mysocket.bind((host, 0))
207 mysocket.bind((host, 0))
208 port = mysocket.getsockname()[1]
208 port = mysocket.getsockname()[1]
209 mysocket.close()
209 mysocket.close()
210 del mysocket
210 del mysocket
211 return port
211 return port
212
212
213
213
214 @pytest.fixture(scope='session')
214 @pytest.fixture(scope='session')
215 def rcserver_port(request):
215 def rcserver_port(request):
216 port = get_available_port()
216 port = get_available_port()
217 print('Using rcserver port {}'.format(port))
217 print('Using rcserver port {}'.format(port))
218 return port
218 return port
219
219
220
220
221 @pytest.fixture(scope='session')
221 @pytest.fixture(scope='session')
222 def vcsserver_port(request):
222 def vcsserver_port(request):
223 port = request.config.getoption('--vcsserver-port')
223 port = request.config.getoption('--vcsserver-port')
224 if port is None:
224 if port is None:
225 port = get_available_port()
225 port = get_available_port()
226 print('Using vcsserver port {}'.format(port))
226 print('Using vcsserver port {}'.format(port))
227 return port
227 return port
228
228
229
229
230 @pytest.fixture(scope='session')
230 @pytest.fixture(scope='session')
231 def available_port_factory():
231 def available_port_factory():
232 """
232 """
233 Returns a callable which returns free port numbers.
233 Returns a callable which returns free port numbers.
234 """
234 """
235 return get_available_port
235 return get_available_port
236
236
237
237
238 @pytest.fixture
238 @pytest.fixture()
239 def available_port(available_port_factory):
239 def available_port(available_port_factory):
240 """
240 """
241 Gives you one free port for the current test.
241 Gives you one free port for the current test.
242
242
243 Uses "available_port_factory" to retrieve the port.
243 Uses "available_port_factory" to retrieve the port.
244 """
244 """
245 return available_port_factory()
245 return available_port_factory()
246
246
247
247
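A tiny usage sketch (assumption): a test that simply needs one free TCP port.

def test_binds_to_a_free_port(available_port):
    # available_port is a plain integer obtained from available_port_factory.
    assert 0 < available_port < 65536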
248 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
249 def testini_factory(tmpdir_factory, ini_config):
249 def testini_factory(tmpdir_factory, ini_config):
250 """
250 """
251 Factory to create an INI file based on TestINI.
251 Factory to create an INI file based on TestINI.
252
252
253 It will make sure to place the INI file in the correct directory.
253 It will make sure to place the INI file in the correct directory.
254 """
254 """
255 basetemp = tmpdir_factory.getbasetemp().strpath
255 basetemp = tmpdir_factory.getbasetemp().strpath
256 return TestIniFactory(basetemp, ini_config)
256 return TestIniFactory(basetemp, ini_config)
257
257
258
258
259 class TestIniFactory(object):
259 class TestIniFactory(object):
260
260
261 def __init__(self, basetemp, template_ini):
261 def __init__(self, basetemp, template_ini):
262 self._basetemp = basetemp
262 self._basetemp = basetemp
263 self._template_ini = template_ini
263 self._template_ini = template_ini
264
264
265 def __call__(self, ini_params, new_file_prefix='test'):
265 def __call__(self, ini_params, new_file_prefix='test'):
266 ini_file = TestINI(
266 ini_file = TestINI(
267 self._template_ini, ini_params=ini_params,
267 self._template_ini, ini_params=ini_params,
268 new_file_prefix=new_file_prefix, dir=self._basetemp)
268 new_file_prefix=new_file_prefix, dir=self._basetemp)
269 result = ini_file.create()
269 result = ini_file.create()
270 return result
270 return result
271
271
272
272
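A hedged usage sketch of the factory above; the section and key are placeholders, while the ini_params shape (a list of {'section': {'key': 'value'}} dicts) matches what TestINI receives.

def test_with_custom_ini(testini_factory):
    # Hypothetical override written into a fresh INI file under basetemp.
    ini_path = testini_factory([{'app:main': {'some.key': 'some-value'}}])
    assert ini_path  # the factory returns the path of the generated file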
273 def get_config(
273 def get_config(
274 config, option_name, override_option_name, overrides=None,
274 config, option_name, override_option_name, overrides=None,
275 basetemp=None, prefix='test'):
275 basetemp=None, prefix='test'):
276 """
276 """
277 Find a configuration file and apply overrides for the given `prefix`.
277 Find a configuration file and apply overrides for the given `prefix`.
278 """
278 """
279 config_file = (
279 config_file = (
280 config.getoption(option_name) or config.getini(option_name))
280 config.getoption(option_name) or config.getini(option_name))
281 if not config_file:
281 if not config_file:
282 pytest.exit(
282 pytest.exit(
283 "Configuration error, could not extract {}.".format(option_name))
283 "Configuration error, could not extract {}.".format(option_name))
284
284
285 overrides = overrides or []
285 overrides = overrides or []
286 config_override = config.getoption(override_option_name)
286 config_override = config.getoption(override_option_name)
287 if config_override:
287 if config_override:
288 overrides.append(config_override)
288 overrides.append(config_override)
289 temp_ini_file = TestINI(
289 temp_ini_file = TestINI(
290 config_file, ini_params=overrides, new_file_prefix=prefix,
290 config_file, ini_params=overrides, new_file_prefix=prefix,
291 dir=basetemp)
291 dir=basetemp)
292
292
293 return temp_ini_file.create()
293 return temp_ini_file.create()
@@ -1,256 +1,256 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import shutil
22 import shutil
23 import datetime
23 import datetime
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import Config
28 from rhodecode.lib.vcs.backends.base import Config
29 from rhodecode.lib.vcs.nodes import FileNode
29 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.tests import get_new_dir
30 from rhodecode.tests import get_new_dir
31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
32
32
33
33
34 @pytest.fixture()
34 @pytest.fixture()
35 def vcs_repository_support(
35 def vcs_repository_support(
36 request, backend_alias, baseapp, _vcs_repo_container):
36 request, backend_alias, baseapp, _vcs_repo_container):
37 """
37 """
38 Provide a test repository for the test run.
38 Provide a test repository for the test run.
39
39
40 Depending on the value of `recreate_repo_per_test` a new repo for each
40 Depending on the value of `recreate_repo_per_test` a new repo for each
41 test will be created.
41 test will be created.
42
42
43 The parameter `--backends` can be used to limit this fixture to specific
43 The parameter `--backends` can be used to limit this fixture to specific
44 backend implementations.
44 backend implementations.
45 """
45 """
46 cls = request.cls
46 cls = request.cls
47
47
48 check_skip_backends(request.node, backend_alias)
48 check_skip_backends(request.node, backend_alias)
49 check_xfail_backends(request.node, backend_alias)
49 check_xfail_backends(request.node, backend_alias)
50
50
51 if _should_create_repo_per_test(cls):
51 if _should_create_repo_per_test(cls):
52 _vcs_repo_container = _create_vcs_repo_container(request)
52 _vcs_repo_container = _create_vcs_repo_container(request)
53
53
54 repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)
54 repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)
55
55
56 # TODO: johbo: Supporting old test class api, think about removing this
56 # TODO: johbo: Supporting old test class api, think about removing this
57 cls.repo = repo
57 cls.repo = repo
58 cls.repo_path = repo.path
58 cls.repo_path = repo.path
59 cls.default_branch = repo.DEFAULT_BRANCH_NAME
59 cls.default_branch = repo.DEFAULT_BRANCH_NAME
60 cls.Backend = cls.backend_class = repo.__class__
60 cls.Backend = cls.backend_class = repo.__class__
61 cls.imc = repo.in_memory_commit
61 cls.imc = repo.in_memory_commit
62
62
63 return backend_alias, repo
63 return backend_alias, repo
64
64
65
65
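A sketch of the intended usage, mirroring how TestArchives pulls this fixture in further below; the class name and test body are illustrative.

@pytest.mark.usefixtures("vcs_repository_support")
class TestMyBackendFeature(BackendTestMixin):
    # Reuse one repository for the whole class instead of recreating it
    # per test (see recreate_repo_per_test handling above).
    recreate_repo_per_test = False

    def test_repo_was_prepared(self):
        # The fixture populates class attributes such as repo and repo_path.
        assert self.repo_path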
66 @pytest.fixture(scope='class')
66 @pytest.fixture(scope='class')
67 def _vcs_repo_container(request):
67 def _vcs_repo_container(request):
68 """
68 """
69 Internal fixture intended to help support class-based scoping on demand.
69 Internal fixture intended to help support class-based scoping on demand.
70 """
70 """
71 return _create_vcs_repo_container(request)
71 return _create_vcs_repo_container(request)
72
72
73
73
74 def _create_vcs_repo_container(request):
74 def _create_vcs_repo_container(request):
75 repo_container = VcsRepoContainer()
75 repo_container = VcsRepoContainer()
76 if not request.config.getoption('--keep-tmp-path'):
76 if not request.config.getoption('--keep-tmp-path'):
77 request.addfinalizer(repo_container.cleanup)
77 request.addfinalizer(repo_container.cleanup)
78 return repo_container
78 return repo_container
79
79
80
80
81 class VcsRepoContainer(object):
81 class VcsRepoContainer(object):
82
82
83 def __init__(self):
83 def __init__(self):
84 self._cleanup_paths = []
84 self._cleanup_paths = []
85 self._repos = {}
85 self._repos = {}
86
86
87 def get_repo(self, test_class, backend_alias):
87 def get_repo(self, test_class, backend_alias):
88 if backend_alias not in self._repos:
88 if backend_alias not in self._repos:
89 repo = _create_empty_repository(test_class, backend_alias)
89 repo = _create_empty_repository(test_class, backend_alias)
90
90
91 self._cleanup_paths.append(repo.path)
91 self._cleanup_paths.append(repo.path)
92 self._repos[backend_alias] = repo
92 self._repos[backend_alias] = repo
93 return self._repos[backend_alias]
93 return self._repos[backend_alias]
94
94
95 def cleanup(self):
95 def cleanup(self):
96 for repo_path in reversed(self._cleanup_paths):
96 for repo_path in reversed(self._cleanup_paths):
97 shutil.rmtree(repo_path)
97 shutil.rmtree(repo_path)
98
98
99
99
100 def _should_create_repo_per_test(cls):
100 def _should_create_repo_per_test(cls):
101 return getattr(cls, 'recreate_repo_per_test', False)
101 return getattr(cls, 'recreate_repo_per_test', False)
102
102
103
103
104 def _create_empty_repository(cls, backend_alias=None):
104 def _create_empty_repository(cls, backend_alias=None):
105 Backend = get_backend(backend_alias or cls.backend_alias)
105 Backend = get_backend(backend_alias or cls.backend_alias)
106 repo_path = get_new_dir(str(time.time()))
106 repo_path = get_new_dir(str(time.time()))
107 repo = Backend(repo_path, create=True)
107 repo = Backend(repo_path, create=True)
108 if hasattr(cls, '_get_commits'):
108 if hasattr(cls, '_get_commits'):
109 commits = cls._get_commits()
109 commits = cls._get_commits()
110 cls.tip = _add_commits_to_repo(repo, commits)
110 cls.tip = _add_commits_to_repo(repo, commits)
111
111
112 return repo
112 return repo
113
113
114
114
115 @pytest.fixture
115 @pytest.fixture()
116 def config():
116 def config():
117 """
117 """
118 Instance of a repository config.
118 Instance of a repository config.
119
119
120 The instance contains only one value:
120 The instance contains only one value:
121
121
122 - Section: "section-a"
122 - Section: "section-a"
123 - Key: "a-1"
123 - Key: "a-1"
124 - Value: "value-a-1"
124 - Value: "value-a-1"
125
125
126 The intended usage is for cases where a config instance is needed but no
126 The intended usage is for cases where a config instance is needed but no
127 specific content is required.
127 specific content is required.
128 """
128 """
129 config = Config()
129 config = Config()
130 config.set('section-a', 'a-1', 'value-a-1')
130 config.set('section-a', 'a-1', 'value-a-1')
131 return config
131 return config
132
132
133
133
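A short usage sketch; it assumes the vcs Config object exposes a get(section, key) accessor mirroring the set() call used above.

def test_reads_stub_config(config):
    # Reads back the single value the fixture stores ('section-a' / 'a-1').
    assert config.get('section-a', 'a-1') == 'value-a-1'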
134 def _add_commits_to_repo(repo, commits):
134 def _add_commits_to_repo(repo, commits):
135 imc = repo.in_memory_commit
135 imc = repo.in_memory_commit
136 tip = None
136 tip = None
137
137
138 for commit in commits:
138 for commit in commits:
139 for node in commit.get('added', []):
139 for node in commit.get('added', []):
140 imc.add(FileNode(node.path, content=node.content))
140 imc.add(FileNode(node.path, content=node.content))
141 for node in commit.get('changed', []):
141 for node in commit.get('changed', []):
142 imc.change(FileNode(node.path, content=node.content))
142 imc.change(FileNode(node.path, content=node.content))
143 for node in commit.get('removed', []):
143 for node in commit.get('removed', []):
144 imc.remove(FileNode(node.path))
144 imc.remove(FileNode(node.path))
145
145
146 tip = imc.commit(
146 tip = imc.commit(
147 message=unicode(commit['message']),
147 message=unicode(commit['message']),
148 author=unicode(commit['author']),
148 author=unicode(commit['author']),
149 date=commit['date'],
149 date=commit['date'],
150 branch=commit.get('branch'))
150 branch=commit.get('branch'))
151 return tip
151 return tip
152
152
153
153
154 @pytest.fixture
154 @pytest.fixture()
155 def vcs_repo(request, backend_alias):
155 def vcs_repo(request, backend_alias):
156 Backend = get_backend(backend_alias)
156 Backend = get_backend(backend_alias)
157 repo_path = get_new_dir(str(time.time()))
157 repo_path = get_new_dir(str(time.time()))
158 repo = Backend(repo_path, create=True)
158 repo = Backend(repo_path, create=True)
159
159
160 @request.addfinalizer
160 @request.addfinalizer
161 def cleanup():
161 def cleanup():
162 shutil.rmtree(repo_path)
162 shutil.rmtree(repo_path)
163
163
164 return repo
164 return repo
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture()
168 def generate_repo_with_commits(vcs_repo):
168 def generate_repo_with_commits(vcs_repo):
169 """
169 """
170 Creates a factory to generate N commits with some file nodes on a randomly
170 Creates a factory to generate N commits with some file nodes on a randomly
171 generated repository.
171 generated repository.
172 """
172 """
173
173
174 def commit_generator(num):
174 def commit_generator(num):
175 start_date = datetime.datetime(2010, 1, 1, 20)
175 start_date = datetime.datetime(2010, 1, 1, 20)
176 for x in xrange(num):
176 for x in xrange(num):
177 yield {
177 yield {
178 'message': 'Commit %d' % x,
178 'message': 'Commit %d' % x,
179 'author': 'Joe Doe <joe.doe@example.com>',
179 'author': 'Joe Doe <joe.doe@example.com>',
180 'date': start_date + datetime.timedelta(hours=12 * x),
180 'date': start_date + datetime.timedelta(hours=12 * x),
181 'added': [
181 'added': [
182 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
182 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
183 ],
183 ],
184 'modified': [
184 'modified': [
185 FileNode('file_%d.txt' % x,
185 FileNode('file_%d.txt' % x,
186 content='Foobar %d modified' % (x-1)),
186 content='Foobar %d modified' % (x-1)),
187 ]
187 ]
188 }
188 }
189
189
190 def commit_maker(num=5):
190 def commit_maker(num=5):
191 _add_commits_to_repo(vcs_repo, commit_generator(num))
191 _add_commits_to_repo(vcs_repo, commit_generator(num))
192 return vcs_repo
192 return vcs_repo
193
193
194 return commit_maker
194 return commit_maker
195
195
196
196
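A usage sketch (an assumption, not part of this changeset): the fixture yields a maker callable which fills the randomly created repository with the requested number of commits; it assumes the backend exposes get_commits().

def test_generated_commits(generate_repo_with_commits):
    repo = generate_repo_with_commits(num=3)
    # Each generated commit adds one file_<n>.txt node, see commit_generator.
    assert len(list(repo.get_commits())) == 3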
197 @pytest.fixture
197 @pytest.fixture()
198 def hg_repo(request, vcs_repo):
198 def hg_repo(request, vcs_repo):
199 repo = vcs_repo
199 repo = vcs_repo
200
200
201 commits = repo._get_commits()
201 commits = repo._get_commits()
202 _add_commits_to_repo(repo, commits)
202 _add_commits_to_repo(repo, commits)
203
203
204 return repo
204 return repo
205
205
206
206
207 @pytest.fixture
207 @pytest.fixture()
208 def hg_commit(hg_repo):
208 def hg_commit(hg_repo):
209 return hg_repo.get_commit()
209 return hg_repo.get_commit()
210
210
211
211
212 class BackendTestMixin(object):
212 class BackendTestMixin(object):
213 """
213 """
214 This is a backend-independent test case class which should be created
214 This is a backend-independent test case class which should be created
215 with the ``type`` method.
215 with the ``type`` method.
216
216
217 It is required to set the following attributes on a subclass:
217 It is required to set the following attributes on a subclass:
218
218
219 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
219 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
220 - ``repo_path``: path to the repository which would be created for set of
220 - ``repo_path``: path to the repository which would be created for set of
221 tests
221 tests
222 - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
222 - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
223 created
223 created
224 before every single test. Defaults to ``True``.
224 before every single test. Defaults to ``True``.
225 """
225 """
226 recreate_repo_per_test = True
226 recreate_repo_per_test = True
227
227
228 @classmethod
228 @classmethod
229 def _get_commits(cls):
229 def _get_commits(cls):
230 commits = [
230 commits = [
231 {
231 {
232 'message': u'Initial commit',
232 'message': u'Initial commit',
233 'author': u'Joe Doe <joe.doe@example.com>',
233 'author': u'Joe Doe <joe.doe@example.com>',
234 'date': datetime.datetime(2010, 1, 1, 20),
234 'date': datetime.datetime(2010, 1, 1, 20),
235 'added': [
235 'added': [
236 FileNode('foobar', content='Foobar'),
236 FileNode('foobar', content='Foobar'),
237 FileNode('foobar2', content='Foobar II'),
237 FileNode('foobar2', content='Foobar II'),
238 FileNode('foo/bar/baz', content='baz here!'),
238 FileNode('foo/bar/baz', content='baz here!'),
239 ],
239 ],
240 },
240 },
241 {
241 {
242 'message': u'Changes...',
242 'message': u'Changes...',
243 'author': u'Jane Doe <jane.doe@example.com>',
243 'author': u'Jane Doe <jane.doe@example.com>',
244 'date': datetime.datetime(2010, 1, 1, 21),
244 'date': datetime.datetime(2010, 1, 1, 21),
245 'added': [
245 'added': [
246 FileNode('some/new.txt', content='news...'),
246 FileNode('some/new.txt', content='news...'),
247 ],
247 ],
248 'changed': [
248 'changed': [
249 FileNode('foobar', 'Foobar I'),
249 FileNode('foobar', 'Foobar I'),
250 ],
250 ],
251 'removed': [],
251 'removed': [],
252 },
252 },
253 ]
253 ]
254 return commits
254 return commits
255
255
256
256
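An illustrative subclass sketch for the mixin documented above; the class name and commit payload are placeholders that follow the dict layout used by BackendTestMixin._get_commits.

@pytest.mark.usefixtures("vcs_repository_support")
class TestSomeBackendBehaviour(BackendTestMixin):
    recreate_repo_per_test = True

    @classmethod
    def _get_commits(cls):
        # A single placeholder commit, enough to give each test a tip.
        return [{
            'message': u'Initial commit',
            'author': u'Joe Doe <joe.doe@example.com>',
            'date': datetime.datetime(2010, 1, 1, 20),
            'added': [FileNode('foobar', content='Foobar')],
        }]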
@@ -1,151 +1,151 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import os
22 import os
23 import shutil
23 import shutil
24 import tarfile
24 import tarfile
25 import tempfile
25 import tempfile
26 import zipfile
26 import zipfile
27 import StringIO
27 import StringIO
28
28
29 import mock
29 import mock
30 import pytest
30 import pytest
31
31
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
33 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
34 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36
36
37
37
38 @pytest.mark.usefixtures("vcs_repository_support")
38 @pytest.mark.usefixtures("vcs_repository_support")
39 class TestArchives(BackendTestMixin):
39 class TestArchives(BackendTestMixin):
40
40
41 @pytest.fixture(autouse=True)
41 @pytest.fixture(autouse=True)
42 def tempfile(self, request):
42 def tempfile(self, request):
43 self.temp_file = tempfile.mkstemp()[1]
43 self.temp_file = tempfile.mkstemp()[1]
44
44
45 @request.addfinalizer
45 @request.addfinalizer
46 def cleanup():
46 def cleanup():
47 os.remove(self.temp_file)
47 os.remove(self.temp_file)
48
48
49 @classmethod
49 @classmethod
50 def _get_commits(cls):
50 def _get_commits(cls):
51 start_date = datetime.datetime(2010, 1, 1, 20)
51 start_date = datetime.datetime(2010, 1, 1, 20)
52 for x in range(5):
52 for x in range(5):
53 yield {
53 yield {
54 'message': 'Commit %d' % x,
54 'message': 'Commit %d' % x,
55 'author': 'Joe Doe <joe.doe@example.com>',
55 'author': 'Joe Doe <joe.doe@example.com>',
56 'date': start_date + datetime.timedelta(hours=12 * x),
56 'date': start_date + datetime.timedelta(hours=12 * x),
57 'added': [
57 'added': [
58 FileNode(
58 FileNode(
59 '%d/file_%d.txt' % (x, x), content='Foobar %d' % x),
59 '%d/file_%d.txt' % (x, x), content='Foobar %d' % x),
60 ],
60 ],
61 }
61 }
62
62
63 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
63 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
64 def test_archive_tar(self, compressor):
64 def test_archive_tar(self, compressor):
65 self.tip.archive_repo(
65 self.tip.archive_repo(
66 self.temp_file, kind='t' + compressor, prefix='repo')
66 self.temp_file, kind='t' + compressor, prefix='repo')
67 out_dir = tempfile.mkdtemp()
67 out_dir = tempfile.mkdtemp()
68 out_file = tarfile.open(self.temp_file, 'r|' + compressor)
68 out_file = tarfile.open(self.temp_file, 'r|' + compressor)
69 out_file.extractall(out_dir)
69 out_file.extractall(out_dir)
70 out_file.close()
70 out_file.close()
71
71
72 for x in range(5):
72 for x in range(5):
73 node_path = '%d/file_%d.txt' % (x, x)
73 node_path = '%d/file_%d.txt' % (x, x)
74 with open(os.path.join(out_dir, 'repo/' + node_path)) as f:
74 with open(os.path.join(out_dir, 'repo/' + node_path)) as f:
75 file_content = f.read()
75 file_content = f.read()
76 assert file_content == self.tip.get_node(node_path).content
76 assert file_content == self.tip.get_node(node_path).content
77
77
78 shutil.rmtree(out_dir)
78 shutil.rmtree(out_dir)
79
79
80 def test_archive_zip(self):
80 def test_archive_zip(self):
81 self.tip.archive_repo(self.temp_file, kind='zip', prefix='repo')
81 self.tip.archive_repo(self.temp_file, kind='zip', prefix='repo')
82 out = zipfile.ZipFile(self.temp_file)
82 out = zipfile.ZipFile(self.temp_file)
83
83
84 for x in range(5):
84 for x in range(5):
85 node_path = '%d/file_%d.txt' % (x, x)
85 node_path = '%d/file_%d.txt' % (x, x)
86 decompressed = StringIO.StringIO()
86 decompressed = StringIO.StringIO()
87 decompressed.write(out.read('repo/' + node_path))
87 decompressed.write(out.read('repo/' + node_path))
88 assert decompressed.getvalue() == \
88 assert decompressed.getvalue() == \
89 self.tip.get_node(node_path).content
89 self.tip.get_node(node_path).content
90 decompressed.close()
90 decompressed.close()
91
91
92 def test_archive_zip_with_metadata(self):
92 def test_archive_zip_with_metadata(self):
93 self.tip.archive_repo(self.temp_file, kind='zip',
93 self.tip.archive_repo(self.temp_file, kind='zip',
94 prefix='repo', write_metadata=True)
94 prefix='repo', write_metadata=True)
95
95
96 out = zipfile.ZipFile(self.temp_file)
96 out = zipfile.ZipFile(self.temp_file)
97 metafile = out.read('.archival.txt')
97 metafile = out.read('.archival.txt')
98
98
99 raw_id = self.tip.raw_id
99 raw_id = self.tip.raw_id
100 assert 'commit_id:%s' % raw_id in metafile
100 assert 'commit_id:%s' % raw_id in metafile
101
101
102 for x in range(5):
102 for x in range(5):
103 node_path = '%d/file_%d.txt' % (x, x)
103 node_path = '%d/file_%d.txt' % (x, x)
104 decompressed = StringIO.StringIO()
104 decompressed = StringIO.StringIO()
105 decompressed.write(out.read('repo/' + node_path))
105 decompressed.write(out.read('repo/' + node_path))
106 assert decompressed.getvalue() == \
106 assert decompressed.getvalue() == \
107 self.tip.get_node(node_path).content
107 self.tip.get_node(node_path).content
108 decompressed.close()
108 decompressed.close()
109
109
110 def test_archive_wrong_kind(self):
110 def test_archive_wrong_kind(self):
111 with pytest.raises(ImproperArchiveTypeError):
111 with pytest.raises(ImproperArchiveTypeError):
112 self.tip.archive_repo(self.temp_file, kind='wrong kind')
112 self.tip.archive_repo(self.temp_file, kind='wrong kind')
113
113
114
114
115 @pytest.fixture
115 @pytest.fixture()
116 def base_commit():
116 def base_commit():
117 """
117 """
118 Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
118 Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
119 """
119 """
120 commit = base.BaseCommit()
120 commit = base.BaseCommit()
121 commit.repository = mock.Mock()
121 commit.repository = mock.Mock()
122 commit.repository.name = u'fake_repo'
122 commit.repository.name = u'fake_repo'
123 commit.short_id = 'fake_id'
123 commit.short_id = 'fake_id'
124 return commit
124 return commit
125
125
126
126
127 @pytest.mark.parametrize("prefix", [u"unicode-prefix", u"Ünïcödë"])
127 @pytest.mark.parametrize("prefix", [u"unicode-prefix", u"Ünïcödë"])
128 def test_validate_archive_prefix_enforces_bytes_as_prefix(prefix, base_commit):
128 def test_validate_archive_prefix_enforces_bytes_as_prefix(prefix, base_commit):
129 with pytest.raises(ValueError):
129 with pytest.raises(ValueError):
130 base_commit._validate_archive_prefix(prefix)
130 base_commit._validate_archive_prefix(prefix)
131
131
132
132
133 def test_validate_archive_prefix_empty_prefix(base_commit):
133 def test_validate_archive_prefix_empty_prefix(base_commit):
134 # TODO: johbo: Should raise a ValueError here.
134 # TODO: johbo: Should raise a ValueError here.
135 with pytest.raises(VCSError):
135 with pytest.raises(VCSError):
136 base_commit._validate_archive_prefix('')
136 base_commit._validate_archive_prefix('')
137
137
138
138
139 def test_validate_archive_prefix_with_leading_slash(base_commit):
139 def test_validate_archive_prefix_with_leading_slash(base_commit):
140 # TODO: johbo: Should raise a ValueError here.
140 # TODO: johbo: Should raise a ValueError here.
141 with pytest.raises(VCSError):
141 with pytest.raises(VCSError):
142 base_commit._validate_archive_prefix('/any')
142 base_commit._validate_archive_prefix('/any')
143
143
144
144
145 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
145 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
146 prefix = base_commit._validate_archive_prefix(None)
146 prefix = base_commit._validate_archive_prefix(None)
147 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
147 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
148 repo_name='fake_repo',
148 repo_name='fake_repo',
149 short_id='fake_id')
149 short_id='fake_id')
150 assert isinstance(prefix, str)
150 assert isinstance(prefix, str)
151 assert prefix == expected_prefix
151 assert prefix == expected_prefix
@@ -1,133 +1,133 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import mock
23 import mock
24 import msgpack
24 import msgpack
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib import vcs
27 from rhodecode.lib import vcs
28 from rhodecode.lib.vcs import client_http, exceptions
28 from rhodecode.lib.vcs import client_http, exceptions
29
29
30
30
31 def is_new_connection(logger, level, message):
31 def is_new_connection(logger, level, message):
32 return (
32 return (
33 logger == 'requests.packages.urllib3.connectionpool' and
33 logger == 'requests.packages.urllib3.connectionpool' and
34 message.startswith('Starting new HTTP'))
34 message.startswith('Starting new HTTP'))
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture()
38 def stub_session():
38 def stub_session():
39 """
39 """
40 Stub of `requests.Session()`.
40 Stub of `requests.Session()`.
41 """
41 """
42 session = mock.Mock()
42 session = mock.Mock()
43 post = session.post()
43 post = session.post()
44 post.content = msgpack.packb({})
44 post.content = msgpack.packb({})
45 post.status_code = 200
45 post.status_code = 200
46
46
47 session.reset_mock()
47 session.reset_mock()
48 return session
48 return session
49
49
50
50
51 @pytest.fixture
51 @pytest.fixture()
52 def stub_fail_session():
52 def stub_fail_session():
53 """
53 """
54 Stub of `requests.Session()` whose POST responses fail with HTTP 500.
54 Stub of `requests.Session()` whose POST responses fail with HTTP 500.
55 """
55 """
56 session = mock.Mock()
56 session = mock.Mock()
57 post = session.post()
57 post = session.post()
58 post.content = msgpack.packb({'error': '500'})
58 post.content = msgpack.packb({'error': '500'})
59 post.status_code = 500
59 post.status_code = 500
60
60
61 session.reset_mock()
61 session.reset_mock()
62 return session
62 return session
63
63
64
64
65 @pytest.fixture
65 @pytest.fixture()
66 def stub_session_factory(stub_session):
66 def stub_session_factory(stub_session):
67 """
67 """
68 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
68 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
69 """
69 """
70 session_factory = mock.Mock()
70 session_factory = mock.Mock()
71 session_factory.return_value = stub_session
71 session_factory.return_value = stub_session
72 return session_factory
72 return session_factory
73
73
74
74
75 @pytest.fixture
75 @pytest.fixture()
76 def stub_session_failing_factory(stub_fail_session):
76 def stub_session_failing_factory(stub_fail_session):
77 """
77 """
78 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
78 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
79 """
79 """
80 session_factory = mock.Mock()
80 session_factory = mock.Mock()
81 session_factory.return_value = stub_fail_session
81 session_factory.return_value = stub_fail_session
82 return session_factory
82 return session_factory
83
83
84
84
85 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
85 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
86 repo = vcsbackend_hg.repo
86 repo = vcsbackend_hg.repo
87 remote_call = repo._remote.branches
87 remote_call = repo._remote.branches
88
88
89 with caplog.at_level(logging.INFO):
89 with caplog.at_level(logging.INFO):
90 for x in range(5):
90 for x in range(5):
91 remote_call(normal=True, closed=False)
91 remote_call(normal=True, closed=False)
92
92
93 new_connections = [
93 new_connections = [
94 r for r in caplog.record_tuples if is_new_connection(*r)]
94 r for r in caplog.record_tuples if is_new_connection(*r)]
95 assert len(new_connections) <= 1
95 assert len(new_connections) <= 1
96
96
97
97
98 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
98 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
99 repo_maker = client_http.RemoteVCSMaker(
99 repo_maker = client_http.RemoteVCSMaker(
100 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
100 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
101 repo_maker.example_call()
101 repo_maker.example_call()
102 stub_session_factory().post.assert_called_with(
102 stub_session_factory().post.assert_called_with(
103 'http://server_and_port/endpoint', data=mock.ANY)
103 'http://server_and_port/endpoint', data=mock.ANY)
104
104
105
105
106 def test_repo_maker_uses_session_for_instance_methods(
106 def test_repo_maker_uses_session_for_instance_methods(
107 stub_session_factory, config):
107 stub_session_factory, config):
108 repo_maker = client_http.RemoteVCSMaker(
108 repo_maker = client_http.RemoteVCSMaker(
109 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
109 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
110 repo = repo_maker('stub_path', 'stub_repo_id', config)
110 repo = repo_maker('stub_path', 'stub_repo_id', config)
111 repo.example_call()
111 repo.example_call()
112 stub_session_factory().post.assert_called_with(
112 stub_session_factory().post.assert_called_with(
113 'http://server_and_port/endpoint', data=mock.ANY)
113 'http://server_and_port/endpoint', data=mock.ANY)
114
114
115
115
116 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
116 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
117 @mock.patch('rhodecode.lib.vcs.connection')
117 @mock.patch('rhodecode.lib.vcs.connection')
118 def test_connect_passes_in_the_same_session(
118 def test_connect_passes_in_the_same_session(
119 connection, session_factory_class, stub_session):
119 connection, session_factory_class, stub_session):
120 session_factory = session_factory_class.return_value
120 session_factory = session_factory_class.return_value
121 session_factory.return_value = stub_session
121 session_factory.return_value = stub_session
122
122
123 vcs.connect_http('server_and_port')
123 vcs.connect_http('server_and_port')
124
124
125
125
126 def test_repo_maker_uses_session_that_throws_error(
126 def test_repo_maker_uses_session_that_throws_error(
127 stub_session_failing_factory, config):
127 stub_session_failing_factory, config):
128 repo_maker = client_http.RemoteVCSMaker(
128 repo_maker = client_http.RemoteVCSMaker(
129 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
129 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
130 repo = repo_maker('stub_path', 'stub_repo_id', config)
130 repo = repo_maker('stub_path', 'stub_repo_id', config)
131
131
132 with pytest.raises(exceptions.HttpVCSCommunicationError):
132 with pytest.raises(exceptions.HttpVCSCommunicationError):
133 repo.example_call()
133 repo.example_call()
@@ -1,1188 +1,1188 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Yield a different path each time, to be used for cloning repos.
42 Yield a different path each time, to be used for cloning repos.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
50
50
51
51
52 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
53 def repo(request, baseapp):
53 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
55 if request.cls:
56 request.cls.repo = repo
56 request.cls.repo = repo
57 return repo
57 return repo
58
58
59
59
60 class TestMercurialRepository(object):
60 class TestMercurialRepository(object):
61
61
62 # pylint: disable=protected-access
62 # pylint: disable=protected-access
63
63
64 def get_clone_repo(self):
64 def get_clone_repo(self):
65 """
65 """
66 Return a clone of the base repo.
66 Return a clone of the base repo.
67 """
67 """
68 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
70 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
71
72 return repo_clone
72 return repo_clone
73
73
74 def get_empty_repo(self):
74 def get_empty_repo(self):
75 """
75 """
76 Return an empty repo.
76 Return an empty repo.
77 """
77 """
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
79
80 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
82 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
84
84
85 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
87 MercurialRepository(u'iShouldFail')
87 MercurialRepository(u'iShouldFail')
88
88
89 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit(u'unicode-commit-id')
91 self.repo.get_commit(u'unicode-commit-id')
92 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
94
94
95 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
96 self.repo.bookmark(u'unicode-bookmark')
96 self.repo.bookmark(u'unicode-bookmark')
97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
98
98
99 def test_unicode_branch(self):
99 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
101 self.repo.branches[u'unicode-branch']
101 self.repo.branches[u'unicode-branch']
102 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
104
104
105 def test_repo_clone(self):
105 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
107 pytest.fail(
107 pytest.fail(
108 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
109 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
110 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
111
111
112 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
117 for commit in repo.get_commits():
118 raw_id = commit.raw_id
118 raw_id = commit.raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120
120
121 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
127
128 # check if current workdir was updated
128 # check if current workdir was updated
129 assert os.path.isfile(
129 assert os.path.isfile(
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
131
132 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
138 assert not os.path.isfile(
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
140
141 def test_commit_ids(self):
141 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
143 # so we can assume they would be available from now on
143 # so we can assume they would be available from now on
144 subset = set([
144 subset = set([
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 '6fff84722075f1607a30f436523403845f84cd9e',
150 '6fff84722075f1607a30f436523403845f84cd9e',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 'be90031137367893f1c406e0a8683010fd115b79',
154 'be90031137367893f1c406e0a8683010fd115b79',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 '84478366594b424af694a6c784cb991a16b87c21',
156 '84478366594b424af694a6c784cb991a16b87c21',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 ])
164 ])
165 assert subset.issubset(set(self.repo.commit_ids))
165 assert subset.issubset(set(self.repo.commit_ids))
166
166
167 # check if we have the proper order of commits
167 # check if we have the proper order of commits
168 org = [
168 org = [
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 '6fff84722075f1607a30f436523403845f84cd9e',
174 '6fff84722075f1607a30f436523403845f84cd9e',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 'be90031137367893f1c406e0a8683010fd115b79',
178 'be90031137367893f1c406e0a8683010fd115b79',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 '84478366594b424af694a6c784cb991a16b87c21',
180 '84478366594b424af694a6c784cb991a16b87c21',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 '536c1a19428381cfea92ac44985304f6a8049569',
193 '536c1a19428381cfea92ac44985304f6a8049569',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 ]
200 ]
201 assert org == self.repo.commit_ids[:31]
201 assert org == self.repo.commit_ids[:31]
202
202
203 def test_iter_slice(self):
203 def test_iter_slice(self):
204 sliced = list(self.repo[:10])
204 sliced = list(self.repo[:10])
205 itered = list(self.repo)[:10]
205 itered = list(self.repo)[:10]
206 assert sliced == itered
206 assert sliced == itered
207
207
208 def test_slicing(self):
208 def test_slicing(self):
209 # 4 1 5 10 95
209 # 4 1 5 10 95
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 (10, 20, 10), (5, 100, 95)]:
211 (10, 20, 10), (5, 100, 95)]:
212 indexes = list(self.repo[sfrom:sto])
212 indexes = list(self.repo[sfrom:sto])
213 assert len(indexes) == size
213 assert len(indexes) == size
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216
216
217 def test_branches(self):
217 def test_branches(self):
218 # TODO: Need more tests here
218 # TODO: Need more tests here
219
219
220 # active branches
220 # active branches
221 assert 'default' in self.repo.branches
221 assert 'default' in self.repo.branches
222 assert 'stable' in self.repo.branches
222 assert 'stable' in self.repo.branches
223
223
224 # closed
224 # closed
225 assert 'git' in self.repo._get_branches(closed=True)
225 assert 'git' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
227
227
228 for name, id in self.repo.branches.items():
228 for name, id in self.repo.branches.items():
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230
230
231 def test_tip_in_tags(self):
231 def test_tip_in_tags(self):
232 # tip is always a tag
232 # tip is always a tag
233 assert 'tip' in self.repo.tags
233 assert 'tip' in self.repo.tags
234
234
235 def test_tip_commit_in_tags(self):
235 def test_tip_commit_in_tags(self):
236 tip = self.repo.get_commit()
236 tip = self.repo.get_commit()
237 assert self.repo.tags['tip'] == tip.raw_id
237 assert self.repo.tags['tip'] == tip.raw_id
238
238
239 def test_initial_commit(self):
239 def test_initial_commit(self):
240 init_commit = self.repo.get_commit(commit_idx=0)
240 init_commit = self.repo.get_commit(commit_idx=0)
241 init_author = init_commit.author
241 init_author = init_commit.author
242
242
243 assert init_commit.message == 'initial import'
243 assert init_commit.message == 'initial import'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 assert init_author == init_commit.committer
245 assert init_author == init_commit.committer
246 assert sorted(init_commit._file_paths) == sorted([
246 assert sorted(init_commit._file_paths) == sorted([
247 'vcs/__init__.py',
247 'vcs/__init__.py',
248 'vcs/backends/BaseRepository.py',
248 'vcs/backends/BaseRepository.py',
249 'vcs/backends/__init__.py',
249 'vcs/backends/__init__.py',
250 ])
250 ])
251 assert sorted(init_commit._dir_paths) == sorted(
251 assert sorted(init_commit._dir_paths) == sorted(
252 ['', 'vcs', 'vcs/backends'])
252 ['', 'vcs', 'vcs/backends'])
253
253
254 assert init_commit._dir_paths + init_commit._file_paths == \
254 assert init_commit._dir_paths + init_commit._file_paths == \
255 init_commit._paths
255 init_commit._paths
256
256
257 with pytest.raises(NodeDoesNotExistError):
257 with pytest.raises(NodeDoesNotExistError):
258 init_commit.get_node(path='foobar')
258 init_commit.get_node(path='foobar')
259
259
260 node = init_commit.get_node('vcs/')
260 node = init_commit.get_node('vcs/')
261 assert hasattr(node, 'kind')
261 assert hasattr(node, 'kind')
262 assert node.kind == NodeKind.DIR
262 assert node.kind == NodeKind.DIR
263
263
264 node = init_commit.get_node('vcs')
264 node = init_commit.get_node('vcs')
265 assert hasattr(node, 'kind')
265 assert hasattr(node, 'kind')
266 assert node.kind == NodeKind.DIR
266 assert node.kind == NodeKind.DIR
267
267
268 node = init_commit.get_node('vcs/__init__.py')
268 node = init_commit.get_node('vcs/__init__.py')
269 assert hasattr(node, 'kind')
269 assert hasattr(node, 'kind')
270 assert node.kind == NodeKind.FILE
270 assert node.kind == NodeKind.FILE
271
271
272 def test_not_existing_commit(self):
272 def test_not_existing_commit(self):
273 # rawid
273 # rawid
274 with pytest.raises(RepositoryError):
274 with pytest.raises(RepositoryError):
275 self.repo.get_commit('abcd' * 10)
275 self.repo.get_commit('abcd' * 10)
276 # shortid
276 # shortid
277 with pytest.raises(RepositoryError):
277 with pytest.raises(RepositoryError):
278 self.repo.get_commit('erro' * 4)
278 self.repo.get_commit('erro' * 4)
279 # numeric
279 # numeric
280 with pytest.raises(RepositoryError):
280 with pytest.raises(RepositoryError):
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282
282
283 # Small chance we ever get to this one
283 # Small chance we ever get to this one
284 idx = pow(2, 30)
284 idx = pow(2, 30)
285 with pytest.raises(RepositoryError):
285 with pytest.raises(RepositoryError):
286 self.repo.get_commit(commit_idx=idx)
286 self.repo.get_commit(commit_idx=idx)
287
287
288 def test_commit10(self):
288 def test_commit10(self):
289 commit10 = self.repo.get_commit(commit_idx=10)
289 commit10 = self.repo.get_commit(commit_idx=10)
290 README = """===
290 README = """===
291 VCS
291 VCS
292 ===
292 ===
293
293
294 Various Version Control System management abstraction layer for Python.
294 Various Version Control System management abstraction layer for Python.
295
295
296 Introduction
296 Introduction
297 ------------
297 ------------
298
298
299 TODO: To be written...
299 TODO: To be written...
300
300
301 """
301 """
302 node = commit10.get_node('README.rst')
302 node = commit10.get_node('README.rst')
303 assert node.kind == NodeKind.FILE
303 assert node.kind == NodeKind.FILE
304 assert node.content == README
304 assert node.content == README
305
305
306 def test_local_clone(self):
306 def test_local_clone(self):
307 clone_path = next(REPO_PATH_GENERATOR)
307 clone_path = next(REPO_PATH_GENERATOR)
308 self.repo._local_clone(clone_path)
308 self.repo._local_clone(clone_path)
309 repo_clone = MercurialRepository(clone_path)
309 repo_clone = MercurialRepository(clone_path)
310
310
311 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
312
312
313 def test_local_clone_fails_if_target_exists(self):
313 def test_local_clone_fails_if_target_exists(self):
314 with pytest.raises(RepositoryError):
314 with pytest.raises(RepositoryError):
315 self.repo._local_clone(self.repo.path)
315 self.repo._local_clone(self.repo.path)
316
316
317 def test_update(self):
317 def test_update(self):
318 repo_clone = self.get_clone_repo()
318 repo_clone = self.get_clone_repo()
319 branches = repo_clone.branches
319 branches = repo_clone.branches
320
320
321 repo_clone._update('default')
321 repo_clone._update('default')
322 assert branches['default'] == repo_clone._identify()
322 assert branches['default'] == repo_clone._identify()
323 repo_clone._update('stable')
323 repo_clone._update('stable')
324 assert branches['stable'] == repo_clone._identify()
324 assert branches['stable'] == repo_clone._identify()
325
325
326 def test_local_pull_branch(self):
326 def test_local_pull_branch(self):
327 target_repo = self.get_empty_repo()
327 target_repo = self.get_empty_repo()
328 source_repo = self.get_clone_repo()
328 source_repo = self.get_clone_repo()
329
329
330 default = Reference(
330 default = Reference(
331 'branch', 'default', source_repo.branches['default'])
331 'branch', 'default', source_repo.branches['default'])
332 target_repo._local_pull(source_repo.path, default)
332 target_repo._local_pull(source_repo.path, default)
333 target_repo = MercurialRepository(target_repo.path)
333 target_repo = MercurialRepository(target_repo.path)
334 assert (target_repo.branches['default'] ==
334 assert (target_repo.branches['default'] ==
335 source_repo.branches['default'])
335 source_repo.branches['default'])
336
336
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 target_repo._local_pull(source_repo.path, stable)
338 target_repo._local_pull(source_repo.path, stable)
339 target_repo = MercurialRepository(target_repo.path)
339 target_repo = MercurialRepository(target_repo.path)
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341
341
342 def test_local_pull_bookmark(self):
342 def test_local_pull_bookmark(self):
343 target_repo = self.get_empty_repo()
343 target_repo = self.get_empty_repo()
344 source_repo = self.get_clone_repo()
344 source_repo = self.get_clone_repo()
345
345
346 commits = list(source_repo.get_commits(branch_name='default'))
346 commits = list(source_repo.get_commits(branch_name='default'))
347 foo1_id = commits[-5].raw_id
347 foo1_id = commits[-5].raw_id
348 foo1 = Reference('book', 'foo1', foo1_id)
348 foo1 = Reference('book', 'foo1', foo1_id)
349 source_repo._update(foo1_id)
349 source_repo._update(foo1_id)
350 source_repo.bookmark('foo1')
350 source_repo.bookmark('foo1')
351
351
352 foo2_id = commits[-3].raw_id
352 foo2_id = commits[-3].raw_id
353 foo2 = Reference('book', 'foo2', foo2_id)
353 foo2 = Reference('book', 'foo2', foo2_id)
354 source_repo._update(foo2_id)
354 source_repo._update(foo2_id)
355 source_repo.bookmark('foo2')
355 source_repo.bookmark('foo2')
356
356
357 target_repo._local_pull(source_repo.path, foo1)
357 target_repo._local_pull(source_repo.path, foo1)
358 target_repo = MercurialRepository(target_repo.path)
358 target_repo = MercurialRepository(target_repo.path)
359 assert target_repo.branches['default'] == commits[-5].raw_id
359 assert target_repo.branches['default'] == commits[-5].raw_id
360
360
361 target_repo._local_pull(source_repo.path, foo2)
361 target_repo._local_pull(source_repo.path, foo2)
362 target_repo = MercurialRepository(target_repo.path)
362 target_repo = MercurialRepository(target_repo.path)
363 assert target_repo.branches['default'] == commits[-3].raw_id
363 assert target_repo.branches['default'] == commits[-3].raw_id
364
364
365 def test_local_pull_commit(self):
365 def test_local_pull_commit(self):
366 target_repo = self.get_empty_repo()
366 target_repo = self.get_empty_repo()
367 source_repo = self.get_clone_repo()
367 source_repo = self.get_clone_repo()
368
368
369 commits = list(source_repo.get_commits(branch_name='default'))
369 commits = list(source_repo.get_commits(branch_name='default'))
370 commit_id = commits[-5].raw_id
370 commit_id = commits[-5].raw_id
371 commit = Reference('rev', commit_id, commit_id)
371 commit = Reference('rev', commit_id, commit_id)
372 target_repo._local_pull(source_repo.path, commit)
372 target_repo._local_pull(source_repo.path, commit)
373 target_repo = MercurialRepository(target_repo.path)
373 target_repo = MercurialRepository(target_repo.path)
374 assert target_repo.branches['default'] == commit_id
374 assert target_repo.branches['default'] == commit_id
375
375
376 commit_id = commits[-3].raw_id
376 commit_id = commits[-3].raw_id
377 commit = Reference('rev', commit_id, commit_id)
377 commit = Reference('rev', commit_id, commit_id)
378 target_repo._local_pull(source_repo.path, commit)
378 target_repo._local_pull(source_repo.path, commit)
379 target_repo = MercurialRepository(target_repo.path)
379 target_repo = MercurialRepository(target_repo.path)
380 assert target_repo.branches['default'] == commit_id
380 assert target_repo.branches['default'] == commit_id
381
381
382 def test_local_pull_from_same_repo(self):
382 def test_local_pull_from_same_repo(self):
383 reference = Reference('branch', 'default', None)
383 reference = Reference('branch', 'default', None)
384 with pytest.raises(ValueError):
384 with pytest.raises(ValueError):
385 self.repo._local_pull(self.repo.path, reference)
385 self.repo._local_pull(self.repo.path, reference)
386
386
387 def test_validate_pull_reference_raises_on_missing_reference(
387 def test_validate_pull_reference_raises_on_missing_reference(
388 self, vcsbackend_hg):
388 self, vcsbackend_hg):
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 reference = Reference(
390 reference = Reference(
391 'book', 'invalid_reference', 'a' * 40)
391 'book', 'invalid_reference', 'a' * 40)
392
392
393 with pytest.raises(CommitDoesNotExistError):
393 with pytest.raises(CommitDoesNotExistError):
394 target_repo._validate_pull_reference(reference)
394 target_repo._validate_pull_reference(reference)
395
395
396 def test_heads(self):
396 def test_heads(self):
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398
398
399 def test_ancestor(self):
399 def test_ancestor(self):
400 commits = [
400 commits = [
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404
404
405 def test_local_push(self):
405 def test_local_push(self):
406 target_repo = self.get_empty_repo()
406 target_repo = self.get_empty_repo()
407
407
408 revisions = list(self.repo.get_commits(branch_name='default'))
408 revisions = list(self.repo.get_commits(branch_name='default'))
409 revision = revisions[-5].raw_id
409 revision = revisions[-5].raw_id
410 self.repo._local_push(revision, target_repo.path)
410 self.repo._local_push(revision, target_repo.path)
411
411
412 target_repo = MercurialRepository(target_repo.path)
412 target_repo = MercurialRepository(target_repo.path)
413
413
414 assert target_repo.branches['default'] == revision
414 assert target_repo.branches['default'] == revision
415
415
416 def test_hooks_can_be_enabled_for_local_push(self):
416 def test_hooks_can_be_enabled_for_local_push(self):
417 revision = 'deadbeef'
417 revision = 'deadbeef'
418 repo_path = 'test_group/test_repo'
418 repo_path = 'test_group/test_repo'
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 remote_mock.push.assert_called_once_with(
421 remote_mock.push.assert_called_once_with(
422 [revision], repo_path, hooks=True, push_branches=False)
422 [revision], repo_path, hooks=True, push_branches=False)
423
423
424 def test_local_merge(self, vcsbackend_hg):
424 def test_local_merge(self, vcsbackend_hg):
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 target_repo = MercurialRepository(target_repo.path)
428 target_repo = MercurialRepository(target_repo.path)
429 target_rev = target_repo.branches['default']
429 target_rev = target_repo.branches['default']
430 target_ref = Reference(
430 target_ref = Reference(
431 type='branch', name='default', commit_id=target_rev)
431 type='branch', name='default', commit_id=target_rev)
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 source_repo = MercurialRepository(source_repo.path)
433 source_repo = MercurialRepository(source_repo.path)
434 source_rev = source_repo.branches['default']
434 source_rev = source_repo.branches['default']
435 source_ref = Reference(
435 source_ref = Reference(
436 type='branch', name='default', commit_id=source_rev)
436 type='branch', name='default', commit_id=source_rev)
437
437
438 target_repo._local_pull(source_repo.path, source_ref)
438 target_repo._local_pull(source_repo.path, source_ref)
439
439
440 merge_message = 'Merge message\n\nDescription:...'
440 merge_message = 'Merge message\n\nDescription:...'
441 user_name = 'Albert Einstein'
441 user_name = 'Albert Einstein'
442 user_email = 'albert@einstein.com'
442 user_email = 'albert@einstein.com'
443 merge_commit_id, needs_push = target_repo._local_merge(
443 merge_commit_id, needs_push = target_repo._local_merge(
444 target_ref, merge_message, user_name, user_email, source_ref)
444 target_ref, merge_message, user_name, user_email, source_ref)
445 assert needs_push
445 assert needs_push
446
446
447 target_repo = MercurialRepository(target_repo.path)
447 target_repo = MercurialRepository(target_repo.path)
448 assert target_repo.commit_ids[-3] == target_rev
448 assert target_repo.commit_ids[-3] == target_rev
449 assert target_repo.commit_ids[-2] == source_rev
449 assert target_repo.commit_ids[-2] == source_rev
450 last_commit = target_repo.get_commit(merge_commit_id)
450 last_commit = target_repo.get_commit(merge_commit_id)
451 assert last_commit.message.strip() == merge_message
451 assert last_commit.message.strip() == merge_message
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453
453
454 assert not os.path.exists(
454 assert not os.path.exists(
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456
456
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 target_rev = target_repo.branches['default']
460 target_rev = target_repo.branches['default']
461 target_ref = Reference(
461 target_ref = Reference(
462 type='branch', name='default', commit_id=target_rev)
462 type='branch', name='default', commit_id=target_rev)
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 source_repo = MercurialRepository(source_repo.path)
464 source_repo = MercurialRepository(source_repo.path)
465 source_rev = source_repo.branches['default']
465 source_rev = source_repo.branches['default']
466 source_ref = Reference(
466 source_ref = Reference(
467 type='branch', name='default', commit_id=source_rev)
467 type='branch', name='default', commit_id=source_rev)
468
468
469 target_repo._local_pull(source_repo.path, source_ref)
469 target_repo._local_pull(source_repo.path, source_ref)
470
470
471 merge_message = 'Merge message\n\nDescription:...'
471 merge_message = 'Merge message\n\nDescription:...'
472 user_name = 'Albert Einstein'
472 user_name = 'Albert Einstein'
473 user_email = 'albert@einstein.com'
473 user_email = 'albert@einstein.com'
474 merge_commit_id, needs_push = target_repo._local_merge(
474 merge_commit_id, needs_push = target_repo._local_merge(
475 target_ref, merge_message, user_name, user_email, source_ref)
475 target_ref, merge_message, user_name, user_email, source_ref)
476 assert merge_commit_id == source_rev
476 assert merge_commit_id == source_rev
477 assert needs_push
477 assert needs_push
478
478
479 target_repo = MercurialRepository(target_repo.path)
479 target_repo = MercurialRepository(target_repo.path)
480 assert target_repo.commit_ids[-2] == target_rev
480 assert target_repo.commit_ids[-2] == target_rev
481 assert target_repo.commit_ids[-1] == source_rev
481 assert target_repo.commit_ids[-1] == source_rev
482
482
483 assert not os.path.exists(
483 assert not os.path.exists(
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485
485
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_rev = target_repo.branches['default']
488 target_rev = target_repo.branches['default']
489 target_ref = Reference(
489 target_ref = Reference(
490 type='branch', name='default', commit_id=target_rev)
490 type='branch', name='default', commit_id=target_rev)
491
491
492 merge_message = 'Merge message\n\nDescription:...'
492 merge_message = 'Merge message\n\nDescription:...'
493 user_name = 'Albert Einstein'
493 user_name = 'Albert Einstein'
494 user_email = 'albert@einstein.com'
494 user_email = 'albert@einstein.com'
495 merge_commit_id, needs_push = target_repo._local_merge(
495 merge_commit_id, needs_push = target_repo._local_merge(
496 target_ref, merge_message, user_name, user_email, target_ref)
496 target_ref, merge_message, user_name, user_email, target_ref)
497 assert merge_commit_id == target_rev
497 assert merge_commit_id == target_rev
498 assert not needs_push
498 assert not needs_push
499
499
500 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
501 assert target_repo.commit_ids[-1] == target_rev
501 assert target_repo.commit_ids[-1] == target_rev
502
502
503 assert not os.path.exists(
503 assert not os.path.exists(
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505
505
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 target_repo = MercurialRepository(target_repo.path)
510 target_repo = MercurialRepository(target_repo.path)
511 target_rev = target_repo.branches['default']
511 target_rev = target_repo.branches['default']
512 target_ref = Reference(
512 target_ref = Reference(
513 type='branch', name='default', commit_id=target_rev)
513 type='branch', name='default', commit_id=target_rev)
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 source_repo = MercurialRepository(source_repo.path)
515 source_repo = MercurialRepository(source_repo.path)
516 source_rev = source_repo.branches['default']
516 source_rev = source_repo.branches['default']
517 source_ref = Reference(
517 source_ref = Reference(
518 type='branch', name='default', commit_id=source_rev)
518 type='branch', name='default', commit_id=source_rev)
519
519
520 target_repo._local_pull(source_repo.path, source_ref)
520 target_repo._local_pull(source_repo.path, source_ref)
521 with pytest.raises(RepositoryError):
521 with pytest.raises(RepositoryError):
522 target_repo._local_merge(
522 target_repo._local_merge(
523 target_ref, 'merge_message', 'user name', 'user@name.com',
523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 source_ref)
524 source_ref)
525
525
526 # Check we are not left in an intermediate merge state
526 # Check we are not left in an intermediate merge state
527 assert not os.path.exists(
527 assert not os.path.exists(
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529
529
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 commits = [
531 commits = [
532 {'message': 'a'},
532 {'message': 'a'},
533 {'message': 'b', 'branch': 'b'},
533 {'message': 'b', 'branch': 'b'},
534 ]
534 ]
535 repo = backend_hg.create_repo(commits)
535 repo = backend_hg.create_repo(commits)
536 commit_ids = backend_hg.commit_ids
536 commit_ids = backend_hg.commit_ids
537 target_ref = Reference(
537 target_ref = Reference(
538 type='branch', name='default', commit_id=commit_ids['a'])
538 type='branch', name='default', commit_id=commit_ids['a'])
539 source_ref = Reference(
539 source_ref = Reference(
540 type='branch', name='b', commit_id=commit_ids['b'])
540 type='branch', name='b', commit_id=commit_ids['b'])
541 merge_message = 'Merge message\n\nDescription:...'
541 merge_message = 'Merge message\n\nDescription:...'
542 user_name = 'Albert Einstein'
542 user_name = 'Albert Einstein'
543 user_email = 'albert@einstein.com'
543 user_email = 'albert@einstein.com'
544 vcs_repo = repo.scm_instance()
544 vcs_repo = repo.scm_instance()
545 merge_commit_id, needs_push = vcs_repo._local_merge(
545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 target_ref, merge_message, user_name, user_email, source_ref)
546 target_ref, merge_message, user_name, user_email, source_ref)
547 assert merge_commit_id != source_ref.commit_id
547 assert merge_commit_id != source_ref.commit_id
548 assert needs_push is True
548 assert needs_push is True
549 commit = vcs_repo.get_commit(merge_commit_id)
549 commit = vcs_repo.get_commit(merge_commit_id)
550 assert commit.merge is True
550 assert commit.merge is True
551 assert commit.message == merge_message
551 assert commit.message == merge_message
552
552
553 def test_maybe_prepare_merge_workspace(self):
553 def test_maybe_prepare_merge_workspace(self):
554 workspace = self.repo._maybe_prepare_merge_workspace(
554 workspace = self.repo._maybe_prepare_merge_workspace(
555 1, 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
556
556
557 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
558 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
559 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
560
560
561 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
562 workspace = self.repo._maybe_prepare_merge_workspace(
562 workspace = self.repo._maybe_prepare_merge_workspace(
563 1, 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
564 assert os.path.isdir(workspace)
564 assert os.path.isdir(workspace)
565
565
566 def test_cleanup_merge_workspace(self):
566 def test_cleanup_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 1, 'pr3', 'unused', 'unused2')
568 1, 'pr3', 'unused', 'unused2')
569
569
570 assert os.path.isdir(workspace)
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571 self.repo.cleanup_merge_workspace(1, 'pr3')
572
572
573 assert not os.path.exists(workspace)
573 assert not os.path.exists(workspace)
574
574
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 # No assert: because in case of a nonexistent workspace this function
576 # No assert: because in case of a nonexistent workspace this function
577 # should still succeed.
577 # should still succeed.
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
579
579
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 imc = source_repo.in_memory_commit
585 imc = source_repo.in_memory_commit
586 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.add(FileNode('file_x', content=source_repo.name))
587 imc.commit(
587 imc.commit(
588 message=u'Automatic commit from repo merge test',
588 message=u'Automatic commit from repo merge test',
589 author=u'Automatic <automatic@rhodecode.com>')
589 author=u'Automatic <automatic@rhodecode.com>')
590 target_commit = target_repo.get_commit()
590 target_commit = target_repo.get_commit()
591 source_commit = source_repo.get_commit()
591 source_commit = source_repo.get_commit()
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 bookmark_name = 'bookmark'
593 bookmark_name = 'bookmark'
594 target_repo._update(default_branch)
594 target_repo._update(default_branch)
595 target_repo.bookmark(bookmark_name)
595 target_repo.bookmark(bookmark_name)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 workspace_id = 'test-merge'
598 workspace_id = 'test-merge'
599 repo_id = repo_id_generator(target_repo.path)
599 repo_id = repo_id_generator(target_repo.path)
600 merge_response = target_repo.merge(
600 merge_response = target_repo.merge(
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 'test user', 'test@rhodecode.com', 'merge message 1',
602 'test user', 'test@rhodecode.com', 'merge message 1',
603 dry_run=False)
603 dry_run=False)
604 expected_merge_response = MergeResponse(
604 expected_merge_response = MergeResponse(
605 True, True, merge_response.merge_ref,
605 True, True, merge_response.merge_ref,
606 MergeFailureReason.NONE)
606 MergeFailureReason.NONE)
607 assert merge_response == expected_merge_response
607 assert merge_response == expected_merge_response
608
608
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 target_repo.path)
610 target_repo.path)
611 target_commits = list(target_repo.get_commits())
611 target_commits = list(target_repo.get_commits())
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 assert source_ref.commit_id in commit_ids
613 assert source_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
615
615
616 merge_commit = target_commits[-1]
616 merge_commit = target_commits[-1]
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.message.strip() == 'merge message 1'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620
620
621 # Check the bookmark was updated in the target repo
621 # Check the bookmark was updated in the target repo
622 assert (
622 assert (
623 target_repo.bookmarks[bookmark_name] ==
623 target_repo.bookmarks[bookmark_name] ==
624 merge_response.merge_ref.commit_id)
624 merge_response.merge_ref.commit_id)
625
625
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 imc = source_repo.in_memory_commit
629 imc = source_repo.in_memory_commit
630 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.add(FileNode('file_x', content=source_repo.name))
631 imc.commit(
631 imc.commit(
632 message=u'Automatic commit from repo merge test',
632 message=u'Automatic commit from repo merge test',
633 author=u'Automatic <automatic@rhodecode.com>')
633 author=u'Automatic <automatic@rhodecode.com>')
634 target_commit = target_repo.get_commit()
634 target_commit = target_repo.get_commit()
635 source_commit = source_repo.get_commit()
635 source_commit = source_repo.get_commit()
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 bookmark_name = 'bookmark'
637 bookmark_name = 'bookmark'
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 source_repo._update(default_branch)
639 source_repo._update(default_branch)
640 source_repo.bookmark(bookmark_name)
640 source_repo.bookmark(bookmark_name)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 workspace_id = 'test-merge'
642 workspace_id = 'test-merge'
643 repo_id = repo_id_generator(target_repo.path)
643 repo_id = repo_id_generator(target_repo.path)
644 merge_response = target_repo.merge(
644 merge_response = target_repo.merge(
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 'test user', 'test@rhodecode.com', 'merge message 1',
646 'test user', 'test@rhodecode.com', 'merge message 1',
647 dry_run=False)
647 dry_run=False)
648 expected_merge_response = MergeResponse(
648 expected_merge_response = MergeResponse(
649 True, True, merge_response.merge_ref,
649 True, True, merge_response.merge_ref,
650 MergeFailureReason.NONE)
650 MergeFailureReason.NONE)
651 assert merge_response == expected_merge_response
651 assert merge_response == expected_merge_response
652
652
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 target_repo.path)
654 target_repo.path)
655 target_commits = list(target_repo.get_commits())
655 target_commits = list(target_repo.get_commits())
656 commit_ids = [c.raw_id for c in target_commits]
656 commit_ids = [c.raw_id for c in target_commits]
657 assert source_ref.commit_id == commit_ids[-1]
657 assert source_ref.commit_id == commit_ids[-1]
658 assert target_ref.commit_id == commit_ids[-2]
658 assert target_ref.commit_id == commit_ids[-2]
659
659
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665
665
666 # add an extra head to the target repo
666 # add an extra head to the target repo
667 imc = target_repo.in_memory_commit
667 imc = target_repo.in_memory_commit
668 imc.add(FileNode('file_x', content='foo'))
668 imc.add(FileNode('file_x', content='foo'))
669 commits = list(target_repo.get_commits())
669 commits = list(target_repo.get_commits())
670 imc.commit(
670 imc.commit(
671 message=u'Automatic commit from repo merge test',
671 message=u'Automatic commit from repo merge test',
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
673
673
674 target_commit = target_repo.get_commit()
674 target_commit = target_repo.get_commit()
675 source_commit = source_repo.get_commit()
675 source_commit = source_repo.get_commit()
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 target_repo._update(default_branch)
677 target_repo._update(default_branch)
678
678
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 workspace_id = 'test-merge'
681 workspace_id = 'test-merge'
682
682
683 assert len(target_repo._heads(branch='default')) == 2
683 assert len(target_repo._heads(branch='default')) == 2
684 heads = target_repo._heads(branch='default')
684 heads = target_repo._heads(branch='default')
685 expected_merge_response = MergeResponse(
685 expected_merge_response = MergeResponse(
686 False, False, None,
686 False, False, None,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
688 metadata={'heads': heads})
688 metadata={'heads': heads})
689 repo_id = repo_id_generator(target_repo.path)
689 repo_id = repo_id_generator(target_repo.path)
690 merge_response = target_repo.merge(
690 merge_response = target_repo.merge(
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
691 repo_id, workspace_id, target_ref, source_repo, source_ref,
692 'test user', 'test@rhodecode.com', 'merge message 1',
692 'test user', 'test@rhodecode.com', 'merge message 1',
693 dry_run=False)
693 dry_run=False)
694 assert merge_response == expected_merge_response
694 assert merge_response == expected_merge_response
695
695
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
698 source_repo = vcsbackend_hg.clone_repo(target_repo)
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
701 imc = source_repo.in_memory_commit
701 imc = source_repo.in_memory_commit
702 imc.add(FileNode('file_x', content=source_repo.name))
702 imc.add(FileNode('file_x', content=source_repo.name))
703 imc.commit(
703 imc.commit(
704 message=u'Automatic commit from repo merge test',
704 message=u'Automatic commit from repo merge test',
705 author=u'Automatic <automatic@rhodecode.com>')
705 author=u'Automatic <automatic@rhodecode.com>')
706 target_commit = target_repo.get_commit()
706 target_commit = target_repo.get_commit()
707 source_commit = source_repo.get_commit()
707 source_commit = source_repo.get_commit()
708
708
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
710
710
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
711 default_branch = target_repo.DEFAULT_BRANCH_NAME
712 bookmark_name = 'bookmark'
712 bookmark_name = 'bookmark'
713 source_repo._update(default_branch)
713 source_repo._update(default_branch)
714 source_repo.bookmark(bookmark_name)
714 source_repo.bookmark(bookmark_name)
715
715
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
718 repo_id = repo_id_generator(target_repo.path)
718 repo_id = repo_id_generator(target_repo.path)
719 workspace_id = 'test-merge'
719 workspace_id = 'test-merge'
720
720
721 merge_response = target_repo.merge(
721 merge_response = target_repo.merge(
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
722 repo_id, workspace_id, target_ref, source_repo, source_ref,
723 'test user', 'test@rhodecode.com', 'merge message 1',
723 'test user', 'test@rhodecode.com', 'merge message 1',
724 dry_run=False, use_rebase=True)
724 dry_run=False, use_rebase=True)
725
725
726 expected_merge_response = MergeResponse(
726 expected_merge_response = MergeResponse(
727 True, True, merge_response.merge_ref,
727 True, True, merge_response.merge_ref,
728 MergeFailureReason.NONE)
728 MergeFailureReason.NONE)
729 assert merge_response == expected_merge_response
729 assert merge_response == expected_merge_response
730
730
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
732 target_repo.path)
732 target_repo.path)
733 last_commit = target_repo.get_commit()
733 last_commit = target_repo.get_commit()
734 assert last_commit.message == source_commit.message
734 assert last_commit.message == source_commit.message
735 assert last_commit.author == source_commit.author
735 assert last_commit.author == source_commit.author
736 # This checks that we effectively did a rebase
736 # This checks that we effectively did a rebase
737 assert last_commit.raw_id != source_commit.raw_id
737 assert last_commit.raw_id != source_commit.raw_id
738
738
739 # Check the target has only 4 commits: 2 were already in target and
739 # Check the target has only 4 commits: 2 were already in target and
740 # only two should have been added
740 # only two should have been added
741 assert len(target_repo.commit_ids) == 2 + 2
741 assert len(target_repo.commit_ids) == 2 + 2
742
742
743
743
744 class TestGetShadowInstance(object):
744 class TestGetShadowInstance(object):
745
745
746 @pytest.fixture
746 @pytest.fixture()
747 def repo(self, vcsbackend_hg, monkeypatch):
747 def repo(self, vcsbackend_hg, monkeypatch):
748 repo = vcsbackend_hg.repo
748 repo = vcsbackend_hg.repo
749 monkeypatch.setattr(repo, 'config', mock.Mock())
749 monkeypatch.setattr(repo, 'config', mock.Mock())
750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
751 return repo
751 return repo
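# A note on the fixture above: it swaps the repository config and the remote
# Hg connection for mock.Mock() objects, so the get_shadow_instance() tests
# below can inspect how the config is copied and how its 'hooks' section is
# cleared without touching a real Mercurial backend.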
752
752
753 def test_passes_config(self, repo):
753 def test_passes_config(self, repo):
754 shadow = repo.get_shadow_instance(repo.path)
754 shadow = repo.get_shadow_instance(repo.path)
755 assert shadow.config == repo.config.copy()
755 assert shadow.config == repo.config.copy()
756
756
757 def test_disables_hooks(self, repo):
757 def test_disables_hooks(self, repo):
758 shadow = repo.get_shadow_instance(repo.path)
758 shadow = repo.get_shadow_instance(repo.path)
759 shadow.config.clear_section.assert_called_once_with('hooks')
759 shadow.config.clear_section.assert_called_once_with('hooks')
760
760
761 def test_allows_to_keep_hooks(self, repo):
761 def test_allows_to_keep_hooks(self, repo):
762 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
762 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
763 assert not shadow.config.clear_section.called
763 assert not shadow.config.clear_section.called
764
764
765
765
766 class TestMercurialCommit(object):
766 class TestMercurialCommit(object):
767
767
768 def _test_equality(self, commit):
768 def _test_equality(self, commit):
769 idx = commit.idx
769 idx = commit.idx
770 assert commit == self.repo.get_commit(commit_idx=idx)
770 assert commit == self.repo.get_commit(commit_idx=idx)
771
771
772 def test_equality(self):
772 def test_equality(self):
773 indexes = [0, 10, 20]
773 indexes = [0, 10, 20]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
775 for commit in commits:
775 for commit in commits:
776 self._test_equality(commit)
776 self._test_equality(commit)
777
777
778 def test_default_commit(self):
778 def test_default_commit(self):
779 tip = self.repo.get_commit('tip')
779 tip = self.repo.get_commit('tip')
780 assert tip == self.repo.get_commit()
780 assert tip == self.repo.get_commit()
781 assert tip == self.repo.get_commit(commit_id=None)
781 assert tip == self.repo.get_commit(commit_id=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
782 assert tip == self.repo.get_commit(commit_idx=None)
783 assert tip == list(self.repo[-1:])[0]
783 assert tip == list(self.repo[-1:])[0]
784
784
785 def test_root_node(self):
785 def test_root_node(self):
786 tip = self.repo.get_commit('tip')
786 tip = self.repo.get_commit('tip')
787 assert tip.root is tip.get_node('')
787 assert tip.root is tip.get_node('')
788
788
789 def test_lazy_fetch(self):
789 def test_lazy_fetch(self):
790 """
790 """
791 Test that a commit's nodes expand and are cached as we walk through
791 Test that a commit's nodes expand and are cached as we walk through
792 the commit. This test is somewhat hard to write, as the order of the
792 the commit. This test is somewhat hard to write, as the order of the
793 checks is key here; it was written by running command after command in a shell.
793 checks is key here; it was written by running command after command in a shell.
794 """
794 """
795 commit = self.repo.get_commit(commit_idx=45)
795 commit = self.repo.get_commit(commit_idx=45)
796 assert len(commit.nodes) == 0
796 assert len(commit.nodes) == 0
797 root = commit.root
797 root = commit.root
798 assert len(commit.nodes) == 1
798 assert len(commit.nodes) == 1
799 assert len(root.nodes) == 8
799 assert len(root.nodes) == 8
800 # accessing root.nodes updates commit.nodes
800 # accessing root.nodes updates commit.nodes
801 assert len(commit.nodes) == 9
801 assert len(commit.nodes) == 9
802
802
803 docs = root.get_node('docs')
803 docs = root.get_node('docs')
804 # we haven't yet accessed anything new as docs dir was already cached
804 # we haven't yet accessed anything new as docs dir was already cached
805 assert len(commit.nodes) == 9
805 assert len(commit.nodes) == 9
806 assert len(docs.nodes) == 8
806 assert len(docs.nodes) == 8
807 # accessing docs.nodes updates commit.nodes
807 # accessing docs.nodes updates commit.nodes
808 assert len(commit.nodes) == 17
808 assert len(commit.nodes) == 17
809
809
810 assert docs is commit.get_node('docs')
810 assert docs is commit.get_node('docs')
811 assert docs is root.nodes[0]
811 assert docs is root.nodes[0]
812 assert docs is root.dirs[0]
812 assert docs is root.dirs[0]
813 assert docs is commit.get_node('docs')
813 assert docs is commit.get_node('docs')
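# Taken together, the identity checks above show that 'docs' is served from
# the cache: looking it up via the commit, via root.nodes and via root.dirs
# all return the very same node object.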
814
814
815 def test_nodes_with_commit(self):
815 def test_nodes_with_commit(self):
816 commit = self.repo.get_commit(commit_idx=45)
816 commit = self.repo.get_commit(commit_idx=45)
817 root = commit.root
817 root = commit.root
818 docs = root.get_node('docs')
818 docs = root.get_node('docs')
819 assert docs is commit.get_node('docs')
819 assert docs is commit.get_node('docs')
820 api = docs.get_node('api')
820 api = docs.get_node('api')
821 assert api is commit.get_node('docs/api')
821 assert api is commit.get_node('docs/api')
822 index = api.get_node('index.rst')
822 index = api.get_node('index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
823 assert index is commit.get_node('docs/api/index.rst')
824 assert index is commit.get_node(
824 assert index is commit.get_node(
825 'docs').get_node('api').get_node('index.rst')
825 'docs').get_node('api').get_node('index.rst')
826
826
827 def test_branch_and_tags(self):
827 def test_branch_and_tags(self):
828 commit0 = self.repo.get_commit(commit_idx=0)
828 commit0 = self.repo.get_commit(commit_idx=0)
829 assert commit0.branch == 'default'
829 assert commit0.branch == 'default'
830 assert commit0.tags == []
830 assert commit0.tags == []
831
831
832 commit10 = self.repo.get_commit(commit_idx=10)
832 commit10 = self.repo.get_commit(commit_idx=10)
833 assert commit10.branch == 'default'
833 assert commit10.branch == 'default'
834 assert commit10.tags == []
834 assert commit10.tags == []
835
835
836 commit44 = self.repo.get_commit(commit_idx=44)
836 commit44 = self.repo.get_commit(commit_idx=44)
837 assert commit44.branch == 'web'
837 assert commit44.branch == 'web'
838
838
839 tip = self.repo.get_commit('tip')
839 tip = self.repo.get_commit('tip')
840 assert 'tip' in tip.tags
840 assert 'tip' in tip.tags
841
841
842 def test_bookmarks(self):
842 def test_bookmarks(self):
843 commit0 = self.repo.get_commit(commit_idx=0)
843 commit0 = self.repo.get_commit(commit_idx=0)
844 assert commit0.bookmarks == []
844 assert commit0.bookmarks == []
845
845
846 def _test_file_size(self, idx, path, size):
846 def _test_file_size(self, idx, path, size):
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
848 assert node.is_file()
848 assert node.is_file()
849 assert node.size == size
849 assert node.size == size
850
850
851 def test_file_size(self):
851 def test_file_size(self):
852 to_check = (
852 to_check = (
853 (10, 'setup.py', 1068),
853 (10, 'setup.py', 1068),
854 (20, 'setup.py', 1106),
854 (20, 'setup.py', 1106),
855 (60, 'setup.py', 1074),
855 (60, 'setup.py', 1074),
856
856
857 (10, 'vcs/backends/base.py', 2921),
857 (10, 'vcs/backends/base.py', 2921),
858 (20, 'vcs/backends/base.py', 3936),
858 (20, 'vcs/backends/base.py', 3936),
859 (60, 'vcs/backends/base.py', 6189),
859 (60, 'vcs/backends/base.py', 6189),
860 )
860 )
861 for idx, path, size in to_check:
861 for idx, path, size in to_check:
862 self._test_file_size(idx, path, size)
862 self._test_file_size(idx, path, size)
863
863
864 def test_file_history_from_commits(self):
864 def test_file_history_from_commits(self):
865 node = self.repo[10].get_node('setup.py')
865 node = self.repo[10].get_node('setup.py')
866 commit_ids = [commit.raw_id for commit in node.history]
866 commit_ids = [commit.raw_id for commit in node.history]
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
868
868
869 node = self.repo[20].get_node('setup.py')
869 node = self.repo[20].get_node('setup.py')
870 node_ids = [commit.raw_id for commit in node.history]
870 node_ids = [commit.raw_id for commit in node.history]
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873
873
874 # special case: we check history from the commit that changed this
874 # special case: we check history from the commit that changed this
875 # particular file; this means we check that it is included as well
875 # particular file; this means we check that it is included as well
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
877 .get_node('setup.py')
877 .get_node('setup.py')
878 node_ids = [commit.raw_id for commit in node.history]
878 node_ids = [commit.raw_id for commit in node.history]
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
881
881
882 def test_file_history(self):
882 def test_file_history(self):
883 # we can only check if those commits are present in the history
883 # we can only check if those commits are present in the history
884 # as we cannot update this test every time file is changed
884 # as we cannot update this test every time file is changed
885 files = {
885 files = {
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
887 'vcs/nodes.py': [
887 'vcs/nodes.py': [
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
889 'vcs/backends/hg.py': [
889 'vcs/backends/hg.py': [
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
893 }
893 }
894 for path, indexes in files.items():
894 for path, indexes in files.items():
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
895 tip = self.repo.get_commit(commit_idx=indexes[-1])
896 node = tip.get_node(path)
896 node = tip.get_node(path)
897 node_indexes = [commit.idx for commit in node.history]
897 node_indexes = [commit.idx for commit in node.history]
898 assert set(indexes).issubset(set(node_indexes)), (
898 assert set(indexes).issubset(set(node_indexes)), (
899 "We assumed that %s is subset of commits for which file %s "
899 "We assumed that %s is subset of commits for which file %s "
900 "has been changed, and history of that node returned: %s"
900 "has been changed, and history of that node returned: %s"
901 % (indexes, path, node_indexes))
901 % (indexes, path, node_indexes))
902
902
903 def test_file_annotate(self):
903 def test_file_annotate(self):
904 files = {
904 files = {
905 'vcs/backends/__init__.py': {
905 'vcs/backends/__init__.py': {
906 89: {
906 89: {
907 'lines_no': 31,
907 'lines_no': 31,
908 'commits': [
908 'commits': [
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
911 32, 32, 32, 32, 37, 32, 37, 37, 32,
912 32, 32
912 32, 32
913 ]
913 ]
914 },
914 },
915 20: {
915 20: {
916 'lines_no': 1,
916 'lines_no': 1,
917 'commits': [4]
917 'commits': [4]
918 },
918 },
919 55: {
919 55: {
920 'lines_no': 31,
920 'lines_no': 31,
921 'commits': [
921 'commits': [
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
924 32, 32, 32, 32, 37, 32, 37, 37, 32,
925 32, 32
925 32, 32
926 ]
926 ]
927 }
927 }
928 },
928 },
929 'vcs/exceptions.py': {
929 'vcs/exceptions.py': {
930 89: {
930 89: {
931 'lines_no': 18,
931 'lines_no': 18,
932 'commits': [
932 'commits': [
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 16, 16, 17, 16, 16, 18, 18, 18
934 16, 16, 17, 16, 16, 18, 18, 18
935 ]
935 ]
936 },
936 },
937 20: {
937 20: {
938 'lines_no': 18,
938 'lines_no': 18,
939 'commits': [
939 'commits': [
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 16, 16, 17, 16, 16, 18, 18, 18
941 16, 16, 17, 16, 16, 18, 18, 18
942 ]
942 ]
943 },
943 },
944 55: {
944 55: {
945 'lines_no': 18,
945 'lines_no': 18,
946 'commits': [
946 'commits': [
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
948 17, 16, 16, 18, 18, 18
948 17, 16, 16, 18, 18, 18
949 ]
949 ]
950 }
950 }
951 },
951 },
952 'MANIFEST.in': {
952 'MANIFEST.in': {
953 89: {
953 89: {
954 'lines_no': 5,
954 'lines_no': 5,
955 'commits': [7, 7, 7, 71, 71]
955 'commits': [7, 7, 7, 71, 71]
956 },
956 },
957 20: {
957 20: {
958 'lines_no': 3,
958 'lines_no': 3,
959 'commits': [7, 7, 7]
959 'commits': [7, 7, 7]
960 },
960 },
961 55: {
961 55: {
962 'lines_no': 3,
962 'lines_no': 3,
963 'commits': [7, 7, 7]
963 'commits': [7, 7, 7]
964 }
964 }
965 }
965 }
966 }
966 }
967
967
968 for fname, commit_dict in files.items():
968 for fname, commit_dict in files.items():
969 for idx, __ in commit_dict.items():
969 for idx, __ in commit_dict.items():
970 commit = self.repo.get_commit(commit_idx=idx)
970 commit = self.repo.get_commit(commit_idx=idx)
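# Each entry returned by get_file_annotate() is treated below as a tuple
# whose element [1] is the commit id for that line and whose element [2] is
# a lazy callable returning the commit object; any other fields are not
# relied on here.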
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
973 assert l1_1 == l1_2
973 assert l1_1 == l1_2
974 l1 = l1_2 = [
974 l1 = l1_2 = [
975 x[2]().idx for x in commit.get_file_annotate(fname)]
975 x[2]().idx for x in commit.get_file_annotate(fname)]
976 l2 = files[fname][idx]['commits']
976 l2 = files[fname][idx]['commits']
977 assert l1 == l2, (
977 assert l1 == l2, (
978 "The lists of commit for %s@commit_id%s"
978 "The lists of commit for %s@commit_id%s"
979 "from annotation list should match each other,"
979 "from annotation list should match each other,"
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
981
981
982 def test_commit_state(self):
982 def test_commit_state(self):
983 """
983 """
984 Tests which files have been added/changed/removed at a particular commit
984 Tests which files have been added/changed/removed at a particular commit
985 """
985 """
986
986
987 # commit_id 46ad32a4f974:
987 # commit_id 46ad32a4f974:
988 # hg st --rev 46ad32a4f974
988 # hg st --rev 46ad32a4f974
989 # changed: 13
989 # changed: 13
990 # added: 20
990 # added: 20
991 # removed: 1
991 # removed: 1
992 changed = set([
992 changed = set([
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
995 'vcs/__init__.py', 'vcs/backends/__init__.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
997 'vcs/utils/__init__.py'])
997 'vcs/utils/__init__.py'])
998
998
999 added = set([
999 added = set([
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1009 'vcs/web/simplevcs/views.py'])
1009 'vcs/web/simplevcs/views.py'])
1010
1010
1011 removed = set(['docs/api.rst'])
1011 removed = set(['docs/api.rst'])
1012
1012
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1013 commit64 = self.repo.get_commit('46ad32a4f974')
1014 assert set((node.path for node in commit64.added)) == added
1014 assert set((node.path for node in commit64.added)) == added
1015 assert set((node.path for node in commit64.changed)) == changed
1015 assert set((node.path for node in commit64.changed)) == changed
1016 assert set((node.path for node in commit64.removed)) == removed
1016 assert set((node.path for node in commit64.removed)) == removed
1017
1017
1018 # commit_id b090f22d27d6:
1018 # commit_id b090f22d27d6:
1019 # hg st --rev b090f22d27d6
1019 # hg st --rev b090f22d27d6
1020 # changed: 1
1020 # changed: 1
1021 # added: 0
1021 # added: 0
1022 # removed: 0
1022 # removed: 0
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1023 commit88 = self.repo.get_commit('b090f22d27d6')
1024 assert set((node.path for node in commit88.added)) == set()
1024 assert set((node.path for node in commit88.added)) == set()
1025 assert set((node.path for node in commit88.changed)) == \
1025 assert set((node.path for node in commit88.changed)) == \
1026 set(['.hgignore'])
1026 set(['.hgignore'])
1027 assert set((node.path for node in commit88.removed)) == set()
1027 assert set((node.path for node in commit88.removed)) == set()
1028
1028
1029 #
1029 #
1030 # 85:
1030 # 85:
1031 # added: 2 [
1031 # added: 2 [
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1034 # removed: 1 ['vcs/utils/web.py']
1034 # removed: 1 ['vcs/utils/web.py']
1035 commit85 = self.repo.get_commit(commit_idx=85)
1035 commit85 = self.repo.get_commit(commit_idx=85)
1036 assert set((node.path for node in commit85.added)) == set([
1036 assert set((node.path for node in commit85.added)) == set([
1037 'vcs/utils/diffs.py',
1037 'vcs/utils/diffs.py',
1038 'vcs/web/simplevcs/views/diffs.py'])
1038 'vcs/web/simplevcs/views/diffs.py'])
1039 assert set((node.path for node in commit85.changed)) == set([
1039 assert set((node.path for node in commit85.changed)) == set([
1040 'vcs/web/simplevcs/models.py',
1040 'vcs/web/simplevcs/models.py',
1041 'vcs/web/simplevcs/utils.py',
1041 'vcs/web/simplevcs/utils.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1042 'vcs/web/simplevcs/views/__init__.py',
1043 'vcs/web/simplevcs/views/repository.py',
1043 'vcs/web/simplevcs/views/repository.py',
1044 ])
1044 ])
1045 assert set((node.path for node in commit85.removed)) == \
1045 assert set((node.path for node in commit85.removed)) == \
1046 set(['vcs/utils/web.py'])
1046 set(['vcs/utils/web.py'])
1047
1047
1048 def test_files_state(self):
1048 def test_files_state(self):
1049 """
1049 """
1050 Tests state of FileNodes.
1050 Tests state of FileNodes.
1051 """
1051 """
1052 commit = self.repo.get_commit(commit_idx=85)
1052 commit = self.repo.get_commit(commit_idx=85)
1053 node = commit.get_node('vcs/utils/diffs.py')
1053 node = commit.get_node('vcs/utils/diffs.py')
1054 assert node.state == NodeState.ADDED
1054 assert node.state == NodeState.ADDED
1055 assert node.added
1055 assert node.added
1056 assert not node.changed
1056 assert not node.changed
1057 assert not node.not_changed
1057 assert not node.not_changed
1058 assert not node.removed
1058 assert not node.removed
1059
1059
1060 commit = self.repo.get_commit(commit_idx=88)
1060 commit = self.repo.get_commit(commit_idx=88)
1061 node = commit.get_node('.hgignore')
1061 node = commit.get_node('.hgignore')
1062 assert node.state == NodeState.CHANGED
1062 assert node.state == NodeState.CHANGED
1063 assert not node.added
1063 assert not node.added
1064 assert node.changed
1064 assert node.changed
1065 assert not node.not_changed
1065 assert not node.not_changed
1066 assert not node.removed
1066 assert not node.removed
1067
1067
1068 commit = self.repo.get_commit(commit_idx=85)
1068 commit = self.repo.get_commit(commit_idx=85)
1069 node = commit.get_node('setup.py')
1069 node = commit.get_node('setup.py')
1070 assert node.state == NodeState.NOT_CHANGED
1070 assert node.state == NodeState.NOT_CHANGED
1071 assert not node.added
1071 assert not node.added
1072 assert not node.changed
1072 assert not node.changed
1073 assert node.not_changed
1073 assert node.not_changed
1074 assert not node.removed
1074 assert not node.removed
1075
1075
1076 # If a node has the REMOVED state, then trying to fetch it raises a
1076 # If a node has the REMOVED state, then trying to fetch it raises a
1077 # NodeDoesNotExistError exception
1077 # NodeDoesNotExistError exception
1078 commit = self.repo.get_commit(commit_idx=2)
1078 commit = self.repo.get_commit(commit_idx=2)
1079 path = 'vcs/backends/BaseRepository.py'
1079 path = 'vcs/backends/BaseRepository.py'
1080 with pytest.raises(NodeDoesNotExistError):
1080 with pytest.raises(NodeDoesNotExistError):
1081 commit.get_node(path)
1081 commit.get_node(path)
1082 # but it would be one of ``removed`` (commit's attribute)
1082 # but it would be one of ``removed`` (commit's attribute)
1083 assert path in [rf.path for rf in commit.removed]
1083 assert path in [rf.path for rf in commit.removed]
1084
1084
1085 def test_commit_message_is_unicode(self):
1085 def test_commit_message_is_unicode(self):
1086 for cm in self.repo:
1086 for cm in self.repo:
1087 assert type(cm.message) == unicode
1087 assert type(cm.message) == unicode
1088
1088
1089 def test_commit_author_is_unicode(self):
1089 def test_commit_author_is_unicode(self):
1090 for cm in self.repo:
1090 for cm in self.repo:
1091 assert type(cm.author) == unicode
1091 assert type(cm.author) == unicode
1092
1092
1093 def test_repo_files_content_is_unicode(self):
1093 def test_repo_files_content_is_unicode(self):
1094 test_commit = self.repo.get_commit(commit_idx=100)
1094 test_commit = self.repo.get_commit(commit_idx=100)
1095 for node in test_commit.get_node('/'):
1095 for node in test_commit.get_node('/'):
1096 if node.is_file():
1096 if node.is_file():
1097 assert type(node.content) == unicode
1097 assert type(node.content) == unicode
1098
1098
1099 def test_wrong_path(self):
1099 def test_wrong_path(self):
1100 # There is 'setup.py' in the root dir but not there:
1100 # There is 'setup.py' in the root dir but not there:
1101 path = 'foo/bar/setup.py'
1101 path = 'foo/bar/setup.py'
1102 with pytest.raises(VCSError):
1102 with pytest.raises(VCSError):
1103 self.repo.get_commit().get_node(path)
1103 self.repo.get_commit().get_node(path)
1104
1104
1105 def test_author_email(self):
1105 def test_author_email(self):
1106 assert 'marcin@python-blog.com' == \
1106 assert 'marcin@python-blog.com' == \
1107 self.repo.get_commit('b986218ba1c9').author_email
1107 self.repo.get_commit('b986218ba1c9').author_email
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1108 assert 'lukasz.balcerzak@python-center.pl' == \
1109 self.repo.get_commit('3803844fdbd3').author_email
1109 self.repo.get_commit('3803844fdbd3').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1110 assert '' == self.repo.get_commit('84478366594b').author_email
1111
1111
1112 def test_author_username(self):
1112 def test_author_username(self):
1113 assert 'Marcin Kuzminski' == \
1113 assert 'Marcin Kuzminski' == \
1114 self.repo.get_commit('b986218ba1c9').author_name
1114 self.repo.get_commit('b986218ba1c9').author_name
1115 assert 'Lukasz Balcerzak' == \
1115 assert 'Lukasz Balcerzak' == \
1116 self.repo.get_commit('3803844fdbd3').author_name
1116 self.repo.get_commit('3803844fdbd3').author_name
1117 assert 'marcink' == \
1117 assert 'marcink' == \
1118 self.repo.get_commit('84478366594b').author_name
1118 self.repo.get_commit('84478366594b').author_name
1119
1119
1120
1120
1121 class TestLargeFileRepo(object):
1121 class TestLargeFileRepo(object):
1122
1122
1123 def test_large_file(self, backend_hg):
1123 def test_large_file(self, backend_hg):
1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1125
1125
1126 tip = repo.scm_instance().get_commit()
1126 tip = repo.scm_instance().get_commit()
1127 node = tip.get_node('.hglf/thisfileislarge')
1127 node = tip.get_node('.hglf/thisfileislarge')
1128
1128
1129 lf_node = node.get_largefile_node()
1129 lf_node = node.get_largefile_node()
1130
1130
1131 assert lf_node.is_largefile() is True
1131 assert lf_node.is_largefile() is True
1132 assert lf_node.size == 1024000
1132 assert lf_node.size == 1024000
1133 assert lf_node.name == '.hglf/thisfileislarge'
1133 assert lf_node.name == '.hglf/thisfileislarge'
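# The '.hglf/<name>' entry is the largefiles pointer tracked in the
# repository; get_largefile_node() appears to resolve it to the actual large
# content, hence the 1024000-byte size asserted above.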
1134
1134
1135
1135
1136 class TestGetBranchName(object):
1136 class TestGetBranchName(object):
1137 def test_returns_ref_name_when_type_is_branch(self):
1137 def test_returns_ref_name_when_type_is_branch(self):
1138 ref = self._create_ref('branch', 'fake-name')
1138 ref = self._create_ref('branch', 'fake-name')
1139 result = self.repo._get_branch_name(ref)
1139 result = self.repo._get_branch_name(ref)
1140 assert result == ref.name
1140 assert result == ref.name
1141
1141
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1144 ref = self._create_ref(type_, 'wrong-fake-name')
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1146 remote_mock.ctx_branch.return_value = "fake-name"
1146 remote_mock.ctx_branch.return_value = "fake-name"
1147 result = self.repo._get_branch_name(ref)
1147 result = self.repo._get_branch_name(ref)
1148 assert result == "fake-name"
1148 assert result == "fake-name"
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
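# In other words, for bookmark and tag refs the branch name is not taken from
# the ref itself; it is resolved by asking the remote for the branch of the
# referenced commit via ctx_branch().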
1150
1150
1151 def _create_ref(self, type_, name):
1151 def _create_ref(self, type_, name):
1152 ref = mock.Mock()
1152 ref = mock.Mock()
1153 ref.type = type_
1153 ref.type = type_
1154 ref.name = name
1154 ref.name = name
1155 ref.commit_id = "deadbeef"
1155 ref.commit_id = "deadbeef"
1156 return ref
1156 return ref
1157
1157
1158
1158
1159 class TestIsTheSameBranch(object):
1159 class TestIsTheSameBranch(object):
1160 def test_returns_true_when_branches_are_equal(self):
1160 def test_returns_true_when_branches_are_equal(self):
1161 source_ref = mock.Mock(name="source-ref")
1161 source_ref = mock.Mock(name="source-ref")
1162 target_ref = mock.Mock(name="target-ref")
1162 target_ref = mock.Mock(name="target-ref")
1163 branch_name_patcher = mock.patch.object(
1163 branch_name_patcher = mock.patch.object(
1164 self.repo, "_get_branch_name", return_value="default")
1164 self.repo, "_get_branch_name", return_value="default")
1165 with branch_name_patcher as branch_name_mock:
1165 with branch_name_patcher as branch_name_mock:
1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1167
1167
1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1169 assert branch_name_mock.call_args_list == expected_calls
1169 assert branch_name_mock.call_args_list == expected_calls
1170 assert result is True
1170 assert result is True
1171
1171
1172 def test_returns_false_when_branches_are_not_equal(self):
1172 def test_returns_false_when_branches_are_not_equal(self):
1173 source_ref = mock.Mock(name="source-ref")
1173 source_ref = mock.Mock(name="source-ref")
1174 source_ref.name = "source-branch"
1174 source_ref.name = "source-branch"
1175 target_ref = mock.Mock(name="target-ref")
1175 target_ref = mock.Mock(name="target-ref")
1176 source_ref.name = "target-branch"
1176 source_ref.name = "target-branch"
1177
1177
1178 def side_effect(ref):
1178 def side_effect(ref):
1179 return ref.name
1179 return ref.name
1180
1180
1181 branch_name_patcher = mock.patch.object(
1181 branch_name_patcher = mock.patch.object(
1182 self.repo, "_get_branch_name", side_effect=side_effect)
1182 self.repo, "_get_branch_name", side_effect=side_effect)
1183 with branch_name_patcher as branch_name_mock:
1183 with branch_name_patcher as branch_name_mock:
1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1185
1185
1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1187 assert branch_name_mock.call_args_list == expected_calls
1187 assert branch_name_mock.call_args_list == expected_calls
1188 assert result is False
1188 assert result is False
@@ -1,349 +1,349 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests the so-called "in memory commit" API of vcs.
22 Tests the so-called "in memory commit" API of vcs.
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib.utils2 import safe_unicode
28 from rhodecode.lib.utils2 import safe_unicode
29 from rhodecode.lib.vcs.exceptions import (
29 from rhodecode.lib.vcs.exceptions import (
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
32 NodeNotChangedError)
32 NodeNotChangedError)
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture()
38 def nodes():
38 def nodes():
39 nodes = [
39 nodes = [
40 FileNode('foobar', content='Foo & bar'),
40 FileNode('foobar', content='Foo & bar'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
42 FileNode('foo bar with spaces', content=''),
42 FileNode('foo bar with spaces', content=''),
43 FileNode('foo/bar/baz', content='Inside'),
43 FileNode('foo/bar/baz', content='Inside'),
44 FileNode(
44 FileNode(
45 'foo/bar/file.bin',
45 'foo/bar/file.bin',
46 content=(
46 content=(
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
54 )
54 )
55 ),
55 ),
56 ]
56 ]
57 return nodes
57 return nodes
58
58
59
59
60 @pytest.mark.usefixtures("vcs_repository_support")
60 @pytest.mark.usefixtures("vcs_repository_support")
61 class TestInMemoryCommit(BackendTestMixin):
61 class TestInMemoryCommit(BackendTestMixin):
62 """
62 """
63 This is a backend-independent test case class which should be created
63 This is a backend-independent test case class which should be created
64 with the ``type`` method.
64 with the ``type`` method.
65
65
66 The following attributes are required to be set in a subclass:
66 The following attributes are required to be set in a subclass:
67
67
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
69 """
69 """
70
70
71 @classmethod
71 @classmethod
72 def _get_commits(cls):
72 def _get_commits(cls):
73 return []
73 return []
74
74
75 def test_add(self, nodes):
75 def test_add(self, nodes):
76 for node in nodes:
76 for node in nodes:
77 self.imc.add(node)
77 self.imc.add(node)
78
78
79 self.commit()
79 self.commit()
80 self.assert_succesful_commit(nodes)
80 self.assert_succesful_commit(nodes)
81
81
82 @pytest.mark.backends("hg")
82 @pytest.mark.backends("hg")
83 def test_add_on_branch_hg(self, nodes):
83 def test_add_on_branch_hg(self, nodes):
84 for node in nodes:
84 for node in nodes:
85 self.imc.add(node)
85 self.imc.add(node)
86 self.commit(branch=u'stable')
86 self.commit(branch=u'stable')
87 self.assert_succesful_commit(nodes)
87 self.assert_succesful_commit(nodes)
88
88
89 @pytest.mark.backends("git")
89 @pytest.mark.backends("git")
90 def test_add_on_branch_git(self, nodes):
90 def test_add_on_branch_git(self, nodes):
91 for node in nodes:
91 for node in nodes:
92 self.imc.add(node)
92 self.imc.add(node)
93 self.commit(branch=u'stable')
93 self.commit(branch=u'stable')
94 self.assert_succesful_commit(nodes)
94 self.assert_succesful_commit(nodes)
95
95
96 def test_add_in_bulk(self, nodes):
96 def test_add_in_bulk(self, nodes):
97 self.imc.add(*nodes)
97 self.imc.add(*nodes)
98
98
99 self.commit()
99 self.commit()
100 self.assert_succesful_commit(nodes)
100 self.assert_succesful_commit(nodes)
101
101
102 def test_add_non_ascii_files(self):
102 def test_add_non_ascii_files(self):
103 nodes = [
103 nodes = [
104 FileNode('żółwik/zwierzątko_utf8_str', content='ćććć'),
104 FileNode('żółwik/zwierzątko_utf8_str', content='ćććć'),
105 FileNode(u'żółwik/zwierzątko_unicode', content=u'ćććć'),
105 FileNode(u'żółwik/zwierzątko_unicode', content=u'ćććć'),
106 ]
106 ]
107
107
108 for node in nodes:
108 for node in nodes:
109 self.imc.add(node)
109 self.imc.add(node)
110
110
111 self.commit()
111 self.commit()
112 self.assert_succesful_commit(nodes)
112 self.assert_succesful_commit(nodes)
113
113
114 def commit(self, branch=None):
114 def commit(self, branch=None):
115 self.old_commit_count = len(self.repo.commit_ids)
115 self.old_commit_count = len(self.repo.commit_ids)
116 self.commit_message = u'Test commit with unicode: żółwik'
116 self.commit_message = u'Test commit with unicode: żółwik'
117 self.commit_author = u'{} <foo@email.com>'.format(self.__class__.__name__)
117 self.commit_author = u'{} <foo@email.com>'.format(self.__class__.__name__)
118 self.commit = self.imc.commit(
118 self.commit = self.imc.commit(
119 message=self.commit_message, author=self.commit_author,
119 message=self.commit_message, author=self.commit_author,
120 branch=branch)
120 branch=branch)
121
121
122 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
122 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
123 to_add = [
123 to_add = [
124 FileNode('foo/bar/image.png', content='\0'),
124 FileNode('foo/bar/image.png', content='\0'),
125 FileNode('foo/README.txt', content='readme!'),
125 FileNode('foo/README.txt', content='readme!'),
126 ]
126 ]
127 self.imc.add(*to_add)
127 self.imc.add(*to_add)
128 commit = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
128 commit = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
129 assert isinstance(commit.get_node('foo'), DirNode)
129 assert isinstance(commit.get_node('foo'), DirNode)
130 assert isinstance(commit.get_node('foo/bar'), DirNode)
130 assert isinstance(commit.get_node('foo/bar'), DirNode)
131 self.assert_nodes_in_commit(commit, to_add)
131 self.assert_nodes_in_commit(commit, to_add)
132
132
133 # commit some more files again
133 # commit some more files again
134 to_add = [
134 to_add = [
135 FileNode('foo/bar/foobaz/bar', content='foo'),
135 FileNode('foo/bar/foobaz/bar', content='foo'),
136 FileNode('foo/bar/another/bar', content='foo'),
136 FileNode('foo/bar/another/bar', content='foo'),
137 FileNode('foo/baz.txt', content='foo'),
137 FileNode('foo/baz.txt', content='foo'),
138 FileNode('foobar/foobaz/file', content='foo'),
138 FileNode('foobar/foobaz/file', content='foo'),
139 FileNode('foobar/barbaz', content='foo'),
139 FileNode('foobar/barbaz', content='foo'),
140 ]
140 ]
141 self.imc.add(*to_add)
141 self.imc.add(*to_add)
142 commit = self.imc.commit(u'Another', u'joe doe <joe.doe@example.com>')
142 commit = self.imc.commit(u'Another', u'joe doe <joe.doe@example.com>')
143 self.assert_nodes_in_commit(commit, to_add)
143 self.assert_nodes_in_commit(commit, to_add)
144
144
145 def test_add_raise_already_added(self):
145 def test_add_raise_already_added(self):
146 node = FileNode('foobar', content='baz')
146 node = FileNode('foobar', content='baz')
147 self.imc.add(node)
147 self.imc.add(node)
148 with pytest.raises(NodeAlreadyAddedError):
148 with pytest.raises(NodeAlreadyAddedError):
149 self.imc.add(node)
149 self.imc.add(node)
150
150
151 def test_check_integrity_raise_already_exist(self):
151 def test_check_integrity_raise_already_exist(self):
152 node = FileNode('foobar', content='baz')
152 node = FileNode('foobar', content='baz')
153 self.imc.add(node)
153 self.imc.add(node)
154 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
154 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
155 self.imc.add(node)
155 self.imc.add(node)
156 with pytest.raises(NodeAlreadyExistsError):
156 with pytest.raises(NodeAlreadyExistsError):
157 self.imc.commit(message='new message', author=u'{} <foo@bar.com>'.format(self))
157 self.imc.commit(message='new message', author=u'{} <foo@bar.com>'.format(self))
158
158
159 def test_change(self):
159 def test_change(self):
160 self.imc.add(FileNode('foo/bar/baz', content='foo'))
160 self.imc.add(FileNode('foo/bar/baz', content='foo'))
161 self.imc.add(FileNode('foo/fbar', content='foobar'))
161 self.imc.add(FileNode('foo/fbar', content='foobar'))
162 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
162 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
163
163
164 # Change node's content
164 # Change node's content
165 node = FileNode('foo/bar/baz', content='My **changed** content')
165 node = FileNode('foo/bar/baz', content='My **changed** content')
166 self.imc.change(node)
166 self.imc.change(node)
167 self.imc.commit(u'Changed %s' % node.path, u'joe doe <joe.doe@example.com>')
167 self.imc.commit(u'Changed %s' % node.path, u'joe doe <joe.doe@example.com>')
168
168
169 newtip = self.repo.get_commit()
169 newtip = self.repo.get_commit()
170 assert tip != newtip
170 assert tip != newtip
171 assert tip.id != newtip.id
171 assert tip.id != newtip.id
172 self.assert_nodes_in_commit(newtip, (node,))
172 self.assert_nodes_in_commit(newtip, (node,))
173
173
174 def test_change_non_ascii(self):
174 def test_change_non_ascii(self):
175 to_add = [
175 to_add = [
176 FileNode('żółwik/zwierzątko', content='ćććć'),
176 FileNode('żółwik/zwierzątko', content='ćććć'),
177 FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
177 FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
178 ]
178 ]
179 for node in to_add:
179 for node in to_add:
180 self.imc.add(node)
180 self.imc.add(node)
181
181
182 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
182 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
183
183
184 # Change node's content
184 # Change node's content
185 node = FileNode('żółwik/zwierzątko', content='My **changed** content')
185 node = FileNode('żółwik/zwierzątko', content='My **changed** content')
186 self.imc.change(node)
186 self.imc.change(node)
187 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
187 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
188 author=u'joe doe <joe.doe@example.com>')
188 author=u'joe doe <joe.doe@example.com>')
189
189
190 node_uni = FileNode(
190 node_uni = FileNode(
191 u'żółwik/zwierzątko_uni', content=u'My **changed** content')
191 u'żółwik/zwierzątko_uni', content=u'My **changed** content')
192 self.imc.change(node_uni)
192 self.imc.change(node_uni)
193 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
193 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
194 author=u'joe doe <joe.doe@example.com>')
194 author=u'joe doe <joe.doe@example.com>')
195
195
196 newtip = self.repo.get_commit()
196 newtip = self.repo.get_commit()
197 assert tip != newtip
197 assert tip != newtip
198 assert tip.id != newtip.id
198 assert tip.id != newtip.id
199
199
200 self.assert_nodes_in_commit(newtip, (node, node_uni))
200 self.assert_nodes_in_commit(newtip, (node, node_uni))
201
201
202 def test_change_raise_empty_repository(self):
202 def test_change_raise_empty_repository(self):
203 node = FileNode('foobar')
203 node = FileNode('foobar')
204 with pytest.raises(EmptyRepositoryError):
204 with pytest.raises(EmptyRepositoryError):
205 self.imc.change(node)
205 self.imc.change(node)
206
206
207 def test_check_integrity_change_raise_node_does_not_exist(self):
207 def test_check_integrity_change_raise_node_does_not_exist(self):
208 node = FileNode('foobar', content='baz')
208 node = FileNode('foobar', content='baz')
209 self.imc.add(node)
209 self.imc.add(node)
210 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
210 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
211 node = FileNode('not-foobar', content='')
211 node = FileNode('not-foobar', content='')
212 self.imc.change(node)
212 self.imc.change(node)
213 with pytest.raises(NodeDoesNotExistError):
213 with pytest.raises(NodeDoesNotExistError):
214 self.imc.commit(message='Changed a non-existing node', author=u'{} <foo@bar.com>'.format(self))
214 self.imc.commit(message='Changed a non-existing node', author=u'{} <foo@bar.com>'.format(self))
215
215
216 def test_change_raise_node_already_changed(self):
216 def test_change_raise_node_already_changed(self):
217 node = FileNode('foobar', content='baz')
217 node = FileNode('foobar', content='baz')
218 self.imc.add(node)
218 self.imc.add(node)
219 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
219 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
220 node = FileNode('foobar', content='more baz')
220 node = FileNode('foobar', content='more baz')
221 self.imc.change(node)
221 self.imc.change(node)
222 with pytest.raises(NodeAlreadyChangedError):
222 with pytest.raises(NodeAlreadyChangedError):
223 self.imc.change(node)
223 self.imc.change(node)
224
224
225 def test_check_integrity_change_raise_node_not_changed(self, nodes):
225 def test_check_integrity_change_raise_node_not_changed(self, nodes):
226 self.test_add(nodes) # Performs first commit
226 self.test_add(nodes) # Performs first commit
227
227
228 node = FileNode(nodes[0].path, content=nodes[0].content)
228 node = FileNode(nodes[0].path, content=nodes[0].content)
229 self.imc.change(node)
229 self.imc.change(node)
230 with pytest.raises(NodeNotChangedError):
230 with pytest.raises(NodeNotChangedError):
231 self.imc.commit(
231 self.imc.commit(
232 message=u'Trying to mark node as changed without touching it',
232 message=u'Trying to mark node as changed without touching it',
233 author=u'{} <foo@bar.com>'.format(self))
233 author=u'{} <foo@bar.com>'.format(self))
234
234
235 def test_change_raise_node_already_removed(self):
235 def test_change_raise_node_already_removed(self):
236 node = FileNode('foobar', content='baz')
236 node = FileNode('foobar', content='baz')
237 self.imc.add(node)
237 self.imc.add(node)
238 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
238 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
239 self.imc.remove(FileNode('foobar'))
239 self.imc.remove(FileNode('foobar'))
240 with pytest.raises(NodeAlreadyRemovedError):
240 with pytest.raises(NodeAlreadyRemovedError):
241 self.imc.change(node)
241 self.imc.change(node)
242
242
243 def test_remove(self, nodes):
243 def test_remove(self, nodes):
244 self.test_add(nodes) # Performs first commit
244 self.test_add(nodes) # Performs first commit
245
245
246 tip = self.repo.get_commit()
246 tip = self.repo.get_commit()
247 node = nodes[0]
247 node = nodes[0]
248 assert node.content == tip.get_node(node.path).content
248 assert node.content == tip.get_node(node.path).content
249 self.imc.remove(node)
249 self.imc.remove(node)
250 self.imc.commit(
250 self.imc.commit(
251 message=u'Removed %s' % node.path, author=u'{} <foo@bar.com>'.format(self))
251 message=u'Removed %s' % node.path, author=u'{} <foo@bar.com>'.format(self))
252
252
253 newtip = self.repo.get_commit()
253 newtip = self.repo.get_commit()
254 assert tip != newtip
254 assert tip != newtip
255 assert tip.id != newtip.id
255 assert tip.id != newtip.id
256 with pytest.raises(NodeDoesNotExistError):
256 with pytest.raises(NodeDoesNotExistError):
257 newtip.get_node(node.path)
257 newtip.get_node(node.path)
258
258
259 def test_remove_last_file_from_directory(self):
259 def test_remove_last_file_from_directory(self):
260 node = FileNode('omg/qwe/foo/bar', content='foobar')
260 node = FileNode('omg/qwe/foo/bar', content='foobar')
261 self.imc.add(node)
261 self.imc.add(node)
262 self.imc.commit(u'added', author=u'joe doe <joe@doe.com>')
262 self.imc.commit(u'added', author=u'joe doe <joe@doe.com>')
263
263
264 self.imc.remove(node)
264 self.imc.remove(node)
265 tip = self.imc.commit(u'removed', u'joe doe <joe@doe.com>')
265 tip = self.imc.commit(u'removed', u'joe doe <joe@doe.com>')
266 with pytest.raises(NodeDoesNotExistError):
266 with pytest.raises(NodeDoesNotExistError):
267 tip.get_node('omg/qwe/foo/bar')
267 tip.get_node('omg/qwe/foo/bar')
268
268
269 def test_remove_raise_node_does_not_exist(self, nodes):
269 def test_remove_raise_node_does_not_exist(self, nodes):
270 self.imc.remove(nodes[0])
270 self.imc.remove(nodes[0])
271 with pytest.raises(NodeDoesNotExistError):
271 with pytest.raises(NodeDoesNotExistError):
272 self.imc.commit(
272 self.imc.commit(
273 message='Trying to remove a node from an empty repository',
273 message='Trying to remove a node from an empty repository',
274 author=u'{} <foo@bar.com>'.format(self))
274 author=u'{} <foo@bar.com>'.format(self))
275
275
276 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
276 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
277 self.test_add(nodes) # Performs first commit
277 self.test_add(nodes) # Performs first commit
278
278
279 node = FileNode('no-such-file')
279 node = FileNode('no-such-file')
280 self.imc.remove(node)
280 self.imc.remove(node)
281 with pytest.raises(NodeDoesNotExistError):
281 with pytest.raises(NodeDoesNotExistError):
282 self.imc.commit(
282 self.imc.commit(
283 message=u'Trying to remove a non-existing node',
283 message=u'Trying to remove a non-existing node',
284 author=u'{} <foo@bar.com>'.format(self))
284 author=u'{} <foo@bar.com>'.format(self))
285
285
286 def test_remove_raise_node_already_removed(self, nodes):
286 def test_remove_raise_node_already_removed(self, nodes):
287 self.test_add(nodes) # Performs first commit
287 self.test_add(nodes) # Performs first commit
288
288
289 node = FileNode(nodes[0].path)
289 node = FileNode(nodes[0].path)
290 self.imc.remove(node)
290 self.imc.remove(node)
291 with pytest.raises(NodeAlreadyRemovedError):
291 with pytest.raises(NodeAlreadyRemovedError):
292 self.imc.remove(node)
292 self.imc.remove(node)
293
293
294 def test_remove_raise_node_already_changed(self, nodes):
294 def test_remove_raise_node_already_changed(self, nodes):
295 self.test_add(nodes) # Performs first commit
295 self.test_add(nodes) # Performs first commit
296
296
297 node = FileNode(nodes[0].path, content='Bending time')
297 node = FileNode(nodes[0].path, content='Bending time')
298 self.imc.change(node)
298 self.imc.change(node)
299 with pytest.raises(NodeAlreadyChangedError):
299 with pytest.raises(NodeAlreadyChangedError):
300 self.imc.remove(node)
300 self.imc.remove(node)
301
301
302 def test_reset(self):
302 def test_reset(self):
303 self.imc.add(FileNode('foo', content='bar'))
303 self.imc.add(FileNode('foo', content='bar'))
304 # self.imc.change(FileNode('baz', content='new'))
304 # self.imc.change(FileNode('baz', content='new'))
305 # self.imc.remove(FileNode('qwe'))
305 # self.imc.remove(FileNode('qwe'))
306 self.imc.reset()
306 self.imc.reset()
307 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
307 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
308
308
309 def test_multiple_commits(self):
309 def test_multiple_commits(self):
310 N = 3 # number of commits to perform
310 N = 3 # number of commits to perform
311 last = None
311 last = None
312 for x in xrange(N):
312 for x in xrange(N):
313 fname = 'file%s' % str(x).rjust(5, '0')
313 fname = 'file%s' % str(x).rjust(5, '0')
314 content = 'foobar\n' * x
314 content = 'foobar\n' * x
315 node = FileNode(fname, content=content)
315 node = FileNode(fname, content=content)
316 self.imc.add(node)
316 self.imc.add(node)
317 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs <foo@bar.com>')
317 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs <foo@bar.com>')
318 assert last != commit
318 assert last != commit
319 last = commit
319 last = commit
320
320
321 # Check commit number for same repo
321 # Check commit number for same repo
322 assert len(self.repo.commit_ids) == N
322 assert len(self.repo.commit_ids) == N
323
323
324 # Check commit number for recreated repo
324 # Check commit number for recreated repo
325 repo = self.Backend(self.repo_path)
325 repo = self.Backend(self.repo_path)
326 assert len(repo.commit_ids) == N
326 assert len(repo.commit_ids) == N
327
327
328 def test_date_attr(self, local_dt_to_utc):
328 def test_date_attr(self, local_dt_to_utc):
329 node = FileNode('foobar.txt', content='Foobared!')
329 node = FileNode('foobar.txt', content='Foobared!')
330 self.imc.add(node)
330 self.imc.add(node)
331 date = datetime.datetime(1985, 1, 30, 1, 45)
331 date = datetime.datetime(1985, 1, 30, 1, 45)
332 commit = self.imc.commit(
332 commit = self.imc.commit(
333 u"Committed at time when I was born ;-)",
333 u"Committed at time when I was born ;-)",
334 author=u'{} <foo@bar.com>'.format(self), date=date)
334 author=u'{} <foo@bar.com>'.format(self), date=date)
335
335
336 assert commit.date == local_dt_to_utc(date)
336 assert commit.date == local_dt_to_utc(date)
337
337
338 def assert_succesful_commit(self, added_nodes):
338 def assert_succesful_commit(self, added_nodes):
339 newtip = self.repo.get_commit()
339 newtip = self.repo.get_commit()
340 assert self.commit == newtip
340 assert self.commit == newtip
341 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
341 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
342 assert newtip.message == self.commit_message
342 assert newtip.message == self.commit_message
343 assert newtip.author == self.commit_author
343 assert newtip.author == self.commit_author
344 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
344 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
345 self.assert_nodes_in_commit(newtip, added_nodes)
345 self.assert_nodes_in_commit(newtip, added_nodes)
346
346
347 def assert_nodes_in_commit(self, commit, nodes):
347 def assert_nodes_in_commit(self, commit, nodes):
348 for node in nodes:
348 for node in nodes:
349 assert commit.get_node(node.path).content == node.content
349 assert commit.get_node(node.path).content == node.content
@@ -1,186 +1,186 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.tests import SVN_REPO, TEST_DIR, TESTS_TMP_PATH
26 from rhodecode.tests import SVN_REPO, TEST_DIR, TESTS_TMP_PATH
27 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
27 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
28 from rhodecode.lib.vcs.conf import settings
28 from rhodecode.lib.vcs.conf import settings
29 from rhodecode.lib.vcs.exceptions import VCSError
29 from rhodecode.lib.vcs.exceptions import VCSError
30
30
31
31
32 pytestmark = [
32 pytestmark = [
33 pytest.mark.backends("svn"),
33 pytest.mark.backends("svn"),
34 pytest.mark.usefixtures("baseapp"),
34 pytest.mark.usefixtures("baseapp"),
35 ]
35 ]
36
36
37
37
38 @pytest.fixture
38 @pytest.fixture()
39 def repo(baseapp):
39 def repo(baseapp):
40 repo = SubversionRepository(os.path.join(TESTS_TMP_PATH, SVN_REPO))
40 repo = SubversionRepository(os.path.join(TESTS_TMP_PATH, SVN_REPO))
41 return repo
41 return repo
42
42
43
43
44 @pytest.fixture
44 @pytest.fixture()
45 def head(repo):
45 def head(repo):
46 return repo.get_commit()
46 return repo.get_commit()
47
47
48
48
49 def test_init_fails_if_path_does_not_exist():
49 def test_init_fails_if_path_does_not_exist():
50 path = os.path.join(TEST_DIR, 'i-do-not-exist')
50 path = os.path.join(TEST_DIR, 'i-do-not-exist')
51 with pytest.raises(VCSError):
51 with pytest.raises(VCSError):
52 SubversionRepository(path)
52 SubversionRepository(path)
53
53
54
54
55 def test_init_fails_if_path_is_not_a_valid_repository(tmpdir):
55 def test_init_fails_if_path_is_not_a_valid_repository(tmpdir):
56 path = unicode(tmpdir.mkdir(u'unicode ä'))
56 path = unicode(tmpdir.mkdir(u'unicode ä'))
57 with pytest.raises(VCSError):
57 with pytest.raises(VCSError):
58 SubversionRepository(path)
58 SubversionRepository(path)
59
59
60
60
61 def test_repo_clone(vcsbackend, reposerver):
61 def test_repo_clone(vcsbackend, reposerver):
62 source = vcsbackend.create_repo(number_of_commits=3)
62 source = vcsbackend.create_repo(number_of_commits=3)
63 reposerver.serve(source)
63 reposerver.serve(source)
64 repo = SubversionRepository(
64 repo = SubversionRepository(
65 vcsbackend.new_repo_path(),
65 vcsbackend.new_repo_path(),
66 create=True,
66 create=True,
67 src_url=reposerver.url)
67 src_url=reposerver.url)
68
68
69 assert source.commit_ids == repo.commit_ids
69 assert source.commit_ids == repo.commit_ids
70 assert source[0].message == repo[0].message
70 assert source[0].message == repo[0].message
71
71
72
72
73 def test_latest_commit(head):
73 def test_latest_commit(head):
74 assert head.raw_id == '393'
74 assert head.raw_id == '393'
75
75
76
76
77 def test_commit_description(head):
77 def test_commit_description(head):
78 assert head.message == """Added a symlink"""
78 assert head.message == """Added a symlink"""
79
79
80
80
81 def test_commit_author(head):
81 def test_commit_author(head):
82 assert head.author == 'marcin'
82 assert head.author == 'marcin'
83
83
84
84
85 @pytest.mark.parametrize("filename, content, mime_type", [
85 @pytest.mark.parametrize("filename, content, mime_type", [
86 ('test.txt', 'Text content\n', None),
86 ('test.txt', 'Text content\n', None),
87 ('test.bin', '\0 binary \0', 'application/octet-stream'),
87 ('test.bin', '\0 binary \0', 'application/octet-stream'),
88 ], ids=['text', 'binary'])
88 ], ids=['text', 'binary'])
89 def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type):
89 def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type):
90 repo = vcsbackend.create_repo()
90 repo = vcsbackend.create_repo()
91 vcsbackend.ensure_file(filename, content)
91 vcsbackend.ensure_file(filename, content)
92 file_properties = repo._remote.node_properties(filename, 1)
92 file_properties = repo._remote.node_properties(filename, 1)
93 assert file_properties.get('svn:mime-type') == mime_type
93 assert file_properties.get('svn:mime-type') == mime_type
94
94
95
95
96 def test_slice_access(repo):
96 def test_slice_access(repo):
97 page_size = 5
97 page_size = 5
98 page = 0
98 page = 0
99 start = page * page_size
99 start = page * page_size
100 end = start + page_size - 1
100 end = start + page_size - 1
101
101
102 commits = list(repo[start:end])
102 commits = list(repo[start:end])
103 assert [commit.raw_id for commit in commits] == ['1', '2', '3', '4']
103 assert [commit.raw_id for commit in commits] == ['1', '2', '3', '4']
104
104
105
105
106 def test_walk_changelog_page(repo):
106 def test_walk_changelog_page(repo):
107 page_size = 5
107 page_size = 5
108 page = 0
108 page = 0
109 start = page * page_size
109 start = page * page_size
110 end = start + page_size - 1
110 end = start + page_size - 1
111
111
112 commits = list(repo[start:end])
112 commits = list(repo[start:end])
113 changelog = [
113 changelog = [
114 'r%s, %s, %s' % (c.raw_id, c.author, c.message) for c in commits]
114 'r%s, %s, %s' % (c.raw_id, c.author, c.message) for c in commits]
115
115
116 expected_messages = [
116 expected_messages = [
117 'r1, marcin, initial import',
117 'r1, marcin, initial import',
118 'r2, marcin, hg ignore',
118 'r2, marcin, hg ignore',
119 'r3, marcin, Pip standards refactor',
119 'r3, marcin, Pip standards refactor',
120 'r4, marcin, Base repository few new functions added']
120 'r4, marcin, Base repository few new functions added']
121 assert changelog == expected_messages
121 assert changelog == expected_messages
122
122
123
123
124 def test_read_full_file_tree(head):
124 def test_read_full_file_tree(head):
125 for topnode, dirs, files in head.walk():
125 for topnode, dirs, files in head.walk():
126 for f in files:
126 for f in files:
127 len(f.content)
127 len(f.content)
128
128
129
129
130 def test_topnode_files_attribute(head):
130 def test_topnode_files_attribute(head):
131 topnode = head.get_node('')
131 topnode = head.get_node('')
132 topnode.files
132 topnode.files
133
133
134
134
135 @pytest.mark.parametrize("filename, content, branch, mime_type", [
135 @pytest.mark.parametrize("filename, content, branch, mime_type", [
136 (u'branches/plain/test.txt', 'Text content\n', 'plain', None),
136 (u'branches/plain/test.txt', 'Text content\n', 'plain', None),
137 (u'branches/uniçö∂e/test.bin', '\0 binary \0', u'uniçö∂e',
137 (u'branches/uniçö∂e/test.bin', '\0 binary \0', u'uniçö∂e',
138 'application/octet-stream'),
138 'application/octet-stream'),
139 ], ids=['text', 'binary'])
139 ], ids=['text', 'binary'])
140 def test_unicode_refs(vcsbackend, filename, content, branch, mime_type):
140 def test_unicode_refs(vcsbackend, filename, content, branch, mime_type):
141 repo = vcsbackend.create_repo()
141 repo = vcsbackend.create_repo()
142 vcsbackend.ensure_file(filename, content)
142 vcsbackend.ensure_file(filename, content)
143 with mock.patch(("rhodecode.lib.vcs.backends.svn.repository"
143 with mock.patch(("rhodecode.lib.vcs.backends.svn.repository"
144 ".SubversionRepository._patterns_from_section"),
144 ".SubversionRepository._patterns_from_section"),
145 return_value=['branches/*']):
145 return_value=['branches/*']):
146 assert u'branches/{0}'.format(branch) in repo.branches
146 assert u'branches/{0}'.format(branch) in repo.branches
147
147
148
148
149 def test_compatible_version(monkeypatch, vcsbackend):
149 def test_compatible_version(monkeypatch, vcsbackend):
150 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'pre-1.8-compatible')
150 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'pre-1.8-compatible')
151 path = vcsbackend.new_repo_path()
151 path = vcsbackend.new_repo_path()
152 SubversionRepository(path, create=True)
152 SubversionRepository(path, create=True)
153 with open('{}/db/format'.format(path)) as f:
153 with open('{}/db/format'.format(path)) as f:
154 first_line = f.readline().strip()
154 first_line = f.readline().strip()
155 assert first_line == '4'
155 assert first_line == '4'
156
156
157
157
158 def test_invalid_compatible_version(monkeypatch, vcsbackend):
158 def test_invalid_compatible_version(monkeypatch, vcsbackend):
159 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'i-am-an-invalid-setting')
159 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'i-am-an-invalid-setting')
160 path = vcsbackend.new_repo_path()
160 path = vcsbackend.new_repo_path()
161 with pytest.raises(Exception):
161 with pytest.raises(Exception):
162 SubversionRepository(path, create=True)
162 SubversionRepository(path, create=True)
163
163
164
164
165 class TestSVNCommit(object):
165 class TestSVNCommit(object):
166
166
167 @pytest.fixture(autouse=True)
167 @pytest.fixture(autouse=True)
168 def prepare(self, repo):
168 def prepare(self, repo):
169 self.repo = repo
169 self.repo = repo
170
170
171 def test_file_history_from_commits(self):
171 def test_file_history_from_commits(self):
172 node = self.repo[10].get_node('setup.py')
172 node = self.repo[10].get_node('setup.py')
173 commit_ids = [commit.raw_id for commit in node.history]
173 commit_ids = [commit.raw_id for commit in node.history]
174 assert ['8'] == commit_ids
174 assert ['8'] == commit_ids
175
175
176 node = self.repo[20].get_node('setup.py')
176 node = self.repo[20].get_node('setup.py')
177 node_ids = [commit.raw_id for commit in node.history]
177 node_ids = [commit.raw_id for commit in node.history]
178 assert ['18',
178 assert ['18',
179 '8'] == node_ids
179 '8'] == node_ids
180
180
181 # special case we check history from commit that has this particular
181 # special case: we check history from a commit that has this particular
181 # special case: we check history from a commit that has this particular
182 # file changed; this means we check that it is included as well
182 # file changed; this means we check that it is included as well
183 node = self.repo.get_commit('18').get_node('setup.py')
184 node_ids = [commit.raw_id for commit in node.history]
184 node_ids = [commit.raw_id for commit in node.history]
185 assert ['18',
185 assert ['18',
186 '8'] == node_ids
186 '8'] == node_ids
@@ -1,341 +1,341 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 py.test config for test suite for making push/pull operations.
22 py.test config for test suite for making push/pull operations.
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 to redirect things to stderr instead of stdout.
27 to redirect things to stderr instead of stdout.
28 """
28 """
29
29
30 import os
30 import os
31 import tempfile
31 import tempfile
32 import textwrap
32 import textwrap
33 import pytest
33 import pytest
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
36 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
37 UserToRepoBranchPermission, User
37 UserToRepoBranchPermission, User
38 from rhodecode.model.integration import IntegrationModel
38 from rhodecode.model.integration import IntegrationModel
39 from rhodecode.model.db import Repository
39 from rhodecode.model.db import Repository
40 from rhodecode.model.meta import Session
40 from rhodecode.model.meta import Session
41 from rhodecode.model.settings import SettingsModel
41 from rhodecode.model.settings import SettingsModel
42 from rhodecode.integrations.types.webhook import WebhookIntegrationType
42 from rhodecode.integrations.types.webhook import WebhookIntegrationType
43
43
44 from rhodecode.tests import GIT_REPO, HG_REPO
44 from rhodecode.tests import GIT_REPO, HG_REPO
45 from rhodecode.tests.fixture import Fixture
45 from rhodecode.tests.fixture import Fixture
46 from rhodecode.tests.server_utils import RcWebServer
46 from rhodecode.tests.server_utils import RcWebServer
47
47
48 REPO_GROUP = 'a_repo_group'
48 REPO_GROUP = 'a_repo_group'
49 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
49 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
50 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
50 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
51
51
52
52
53 @pytest.fixture(scope="module")
53 @pytest.fixture(scope="module")
54 def rcextensions(request, db_connection, tmpdir_factory):
54 def rcextensions(request, db_connection, tmpdir_factory):
55 """
55 """
56 Installs a testing rcextensions pack to ensure they work as expected.
56 Installs a testing rcextensions pack to ensure they work as expected.
57 """
57 """
58 init_content = textwrap.dedent("""
58 init_content = textwrap.dedent("""
59 # Forward import the example rcextensions to make it
59 # Forward import the example rcextensions to make it
60 # active for our tests.
60 # active for our tests.
61 from rhodecode.tests.other.example_rcextensions import *
61 from rhodecode.tests.other.example_rcextensions import *
62 """)
62 """)
63
63
64 # Note: rcextensions are looked up based on the path of the ini file
64 # Note: rcextensions are looked up based on the path of the ini file
65 root_path = tmpdir_factory.getbasetemp()
65 root_path = tmpdir_factory.getbasetemp()
66 rcextensions_path = root_path.join('rcextensions')
66 rcextensions_path = root_path.join('rcextensions')
67 init_path = rcextensions_path.join('__init__.py')
67 init_path = rcextensions_path.join('__init__.py')
68
68
69 if rcextensions_path.check():
69 if rcextensions_path.check():
70 pytest.fail(
70 pytest.fail(
71 "Path for rcextensions already exists, please clean up before "
71 "Path for rcextensions already exists, please clean up before "
72 "test run this path: %s" % (rcextensions_path, ))
72 "test run this path: %s" % (rcextensions_path, ))
73 return
73 return
74
74
75 request.addfinalizer(rcextensions_path.remove)
75 request.addfinalizer(rcextensions_path.remove)
76 init_path.write_binary(init_content, ensure=True)
76 init_path.write_binary(init_content, ensure=True)
77
77
78
78
79 @pytest.fixture(scope="module")
79 @pytest.fixture(scope="module")
80 def repos(request, db_connection):
80 def repos(request, db_connection):
81 """Create a copy of each test repo in a repo group."""
81 """Create a copy of each test repo in a repo group."""
82 fixture = Fixture()
82 fixture = Fixture()
83 repo_group = fixture.create_repo_group(REPO_GROUP)
83 repo_group = fixture.create_repo_group(REPO_GROUP)
84 repo_group_id = repo_group.group_id
84 repo_group_id = repo_group.group_id
85 fixture.create_fork(HG_REPO, HG_REPO,
85 fixture.create_fork(HG_REPO, HG_REPO,
86 repo_name_full=HG_REPO_WITH_GROUP,
86 repo_name_full=HG_REPO_WITH_GROUP,
87 repo_group=repo_group_id)
87 repo_group=repo_group_id)
88 fixture.create_fork(GIT_REPO, GIT_REPO,
88 fixture.create_fork(GIT_REPO, GIT_REPO,
89 repo_name_full=GIT_REPO_WITH_GROUP,
89 repo_name_full=GIT_REPO_WITH_GROUP,
90 repo_group=repo_group_id)
90 repo_group=repo_group_id)
91
91
92 @request.addfinalizer
92 @request.addfinalizer
93 def cleanup():
93 def cleanup():
94 fixture.destroy_repo(HG_REPO_WITH_GROUP)
94 fixture.destroy_repo(HG_REPO_WITH_GROUP)
95 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
95 fixture.destroy_repo(GIT_REPO_WITH_GROUP)
96 fixture.destroy_repo_group(repo_group_id)
96 fixture.destroy_repo_group(repo_group_id)
97
97
98
98
99 @pytest.fixture(scope="module")
99 @pytest.fixture(scope="module")
100 def rc_web_server_config_modification():
100 def rc_web_server_config_modification():
101 return []
101 return []
102
102
103
103
104 @pytest.fixture(scope="module")
104 @pytest.fixture(scope="module")
105 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
105 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
106 """
106 """
107 Configuration file used for the fixture `rc_web_server`.
107 Configuration file used for the fixture `rc_web_server`.
108 """
108 """
109
109
110 def factory(rcweb_port, vcsserver_port):
110 def factory(rcweb_port, vcsserver_port):
111 custom_params = [
111 custom_params = [
112 {'handler_console': {'level': 'DEBUG'}},
112 {'handler_console': {'level': 'DEBUG'}},
113 {'server:main': {'port': rcweb_port}},
113 {'server:main': {'port': rcweb_port}},
114 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
114 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
115 ]
115 ]
116 custom_params.extend(rc_web_server_config_modification)
116 custom_params.extend(rc_web_server_config_modification)
117 return testini_factory(custom_params)
117 return testini_factory(custom_params)
118 return factory
118 return factory
119
119
120
120
121 @pytest.fixture(scope="module")
121 @pytest.fixture(scope="module")
122 def rc_web_server(
122 def rc_web_server(
123 request, vcsserver_factory, available_port_factory,
123 request, vcsserver_factory, available_port_factory,
124 rc_web_server_config_factory, repos, rcextensions):
124 rc_web_server_config_factory, repos, rcextensions):
125 """
125 """
126 Run the web server as a subprocess, with its own instance of vcsserver.
126 Run the web server as a subprocess, with its own instance of vcsserver.
127 """
127 """
128 rcweb_port = available_port_factory()
128 rcweb_port = available_port_factory()
129 print('Using rcweb ops test port {}'.format(rcweb_port))
129 print('Using rcweb ops test port {}'.format(rcweb_port))
130
130
131 vcsserver_port = available_port_factory()
131 vcsserver_port = available_port_factory()
132 print('Using vcsserver ops test port {}'.format(vcsserver_port))
132 print('Using vcsserver ops test port {}'.format(vcsserver_port))
133
133
134 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
134 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
135 vcsserver_factory(
135 vcsserver_factory(
136 request, vcsserver_port=vcsserver_port,
136 request, vcsserver_port=vcsserver_port,
137 log_file=vcs_log,
137 log_file=vcs_log,
138 overrides=(
138 overrides=(
139 {'server:main': {'workers': 2}},
139 {'server:main': {'workers': 2}},
140 {'server:main': {'graceful_timeout': 10}},
140 {'server:main': {'graceful_timeout': 10}},
141 ))
141 ))
142
142
143 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
143 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
144 rc_web_server_config = rc_web_server_config_factory(
144 rc_web_server_config = rc_web_server_config_factory(
145 rcweb_port=rcweb_port,
145 rcweb_port=rcweb_port,
146 vcsserver_port=vcsserver_port)
146 vcsserver_port=vcsserver_port)
147 server = RcWebServer(rc_web_server_config, log_file=rc_log)
147 server = RcWebServer(rc_web_server_config, log_file=rc_log)
148 server.start()
148 server.start()
149
149
150 @request.addfinalizer
150 @request.addfinalizer
151 def cleanup():
151 def cleanup():
152 server.shutdown()
152 server.shutdown()
153
153
154 server.wait_until_ready()
154 server.wait_until_ready()
155 return server
155 return server
156
156
157
157
158 @pytest.fixture
158 @pytest.fixture()
159 def disable_locking(baseapp):
159 def disable_locking(baseapp):
160 r = Repository.get_by_repo_name(GIT_REPO)
160 r = Repository.get_by_repo_name(GIT_REPO)
161 Repository.unlock(r)
161 Repository.unlock(r)
162 r.enable_locking = False
162 r.enable_locking = False
163 Session().add(r)
163 Session().add(r)
164 Session().commit()
164 Session().commit()
165
165
166 r = Repository.get_by_repo_name(HG_REPO)
166 r = Repository.get_by_repo_name(HG_REPO)
167 Repository.unlock(r)
167 Repository.unlock(r)
168 r.enable_locking = False
168 r.enable_locking = False
169 Session().add(r)
169 Session().add(r)
170 Session().commit()
170 Session().commit()
171
171
172
172
173 @pytest.fixture
173 @pytest.fixture()
174 def enable_auth_plugins(request, baseapp, csrf_token):
174 def enable_auth_plugins(request, baseapp, csrf_token):
175 """
175 """
176 Return a factory object that, when called, allows controlling which
176 Return a factory object that, when called, allows controlling which
177 authentication plugins are enabled.
177 authentication plugins are enabled.
178 """
178 """
179 def _enable_plugins(plugins_list, override=None):
179 def _enable_plugins(plugins_list, override=None):
180 override = override or {}
180 override = override or {}
181 params = {
181 params = {
182 'auth_plugins': ','.join(plugins_list),
182 'auth_plugins': ','.join(plugins_list),
183 }
183 }
184
184
185 # helper translate some names to others
185 # helper translate some names to others
186 name_map = {
186 name_map = {
187 'token': 'authtoken'
187 'token': 'authtoken'
188 }
188 }
189
189
190 for module in plugins_list:
190 for module in plugins_list:
191 plugin_name = module.partition('#')[-1]
191 plugin_name = module.partition('#')[-1]
192 if plugin_name in name_map:
192 if plugin_name in name_map:
193 plugin_name = name_map[plugin_name]
193 plugin_name = name_map[plugin_name]
194 enabled_plugin = 'auth_%s_enabled' % plugin_name
194 enabled_plugin = 'auth_%s_enabled' % plugin_name
195 cache_ttl = 'auth_%s_cache_ttl' % plugin_name
195 cache_ttl = 'auth_%s_cache_ttl' % plugin_name
196
196
197 # default params that are needed for each plugin,
197 # default params that are needed for each plugin,
198 # `enabled` and `cache_ttl`
198 # `enabled` and `cache_ttl`
199 params.update({
199 params.update({
200 enabled_plugin: True,
200 enabled_plugin: True,
201 cache_ttl: 0
201 cache_ttl: 0
202 })
202 })
203 if override.get:
203 if override.get:
204 params.update(override.get(module, {}))
204 params.update(override.get(module, {}))
205
205
206 validated_params = params
206 validated_params = params
207 for k, v in validated_params.items():
207 for k, v in validated_params.items():
208 setting = SettingsModel().create_or_update_setting(k, v)
208 setting = SettingsModel().create_or_update_setting(k, v)
209 Session().add(setting)
209 Session().add(setting)
210 Session().commit()
210 Session().commit()
211
211
212 def cleanup():
212 def cleanup():
213 _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
213 _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
214
214
215 request.addfinalizer(cleanup)
215 request.addfinalizer(cleanup)
216
216
217 return _enable_plugins
217 return _enable_plugins
218
218
219
219
220 @pytest.fixture
220 @pytest.fixture()
221 def fs_repo_only(request, rhodecode_fixtures):
221 def fs_repo_only(request, rhodecode_fixtures):
222 def fs_repo_fabric(repo_name, repo_type):
222 def fs_repo_fabric(repo_name, repo_type):
223 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
223 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
224 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
224 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)
225
225
226 def cleanup():
226 def cleanup():
227 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
227 rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
228 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
228 rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)
229
229
230 request.addfinalizer(cleanup)
230 request.addfinalizer(cleanup)
231
231
232 return fs_repo_fabric
232 return fs_repo_fabric
233
233
234
234
235 @pytest.fixture
235 @pytest.fixture()
236 def enable_webhook_push_integration(request):
236 def enable_webhook_push_integration(request):
237 integration = Integration()
237 integration = Integration()
238 integration.integration_type = WebhookIntegrationType.key
238 integration.integration_type = WebhookIntegrationType.key
239 Session().add(integration)
239 Session().add(integration)
240
240
241 settings = dict(
241 settings = dict(
242 url='http://httpbin.org/post',
242 url='http://httpbin.org/post',
243 secret_token='secret',
243 secret_token='secret',
244 username=None,
244 username=None,
245 password=None,
245 password=None,
246 custom_header_key=None,
246 custom_header_key=None,
247 custom_header_val=None,
247 custom_header_val=None,
248 method_type='post',
248 method_type='post',
249 events=[events.RepoPushEvent.name],
249 events=[events.RepoPushEvent.name],
250 log_data=True
250 log_data=True
251 )
251 )
252
252
253 IntegrationModel().update_integration(
253 IntegrationModel().update_integration(
254 integration,
254 integration,
255 name='IntegrationWebhookTest',
255 name='IntegrationWebhookTest',
256 enabled=True,
256 enabled=True,
257 settings=settings,
257 settings=settings,
258 repo=None,
258 repo=None,
259 repo_group=None,
259 repo_group=None,
260 child_repos_only=False,
260 child_repos_only=False,
261 )
261 )
262 Session().commit()
262 Session().commit()
263 integration_id = integration.integration_id
263 integration_id = integration.integration_id
264
264
265 @request.addfinalizer
265 @request.addfinalizer
266 def cleanup():
266 def cleanup():
267 integration = Integration.get(integration_id)
267 integration = Integration.get(integration_id)
268 Session().delete(integration)
268 Session().delete(integration)
269 Session().commit()
269 Session().commit()
270
270
271
271
272 @pytest.fixture
272 @pytest.fixture()
273 def branch_permission_setter(request):
273 def branch_permission_setter(request):
274 """
274 """
275
275
276 def my_test(branch_permission_setter):
276 def my_test(branch_permission_setter):
277 branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
277 branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')
278
278
279 """
279 """
280
280
281 rule_id = None
281 rule_id = None
282 write_perm_id = None
282 write_perm_id = None
283
283
284 def _branch_permissions_setter(
284 def _branch_permissions_setter(
285 repo_name, username, pattern='*', permission='branch.push_force'):
285 repo_name, username, pattern='*', permission='branch.push_force'):
286 global rule_id, write_perm_id
286 global rule_id, write_perm_id
287
287
288 repo = Repository.get_by_repo_name(repo_name)
288 repo = Repository.get_by_repo_name(repo_name)
289 repo_id = repo.repo_id
289 repo_id = repo.repo_id
290
290
291 user = User.get_by_username(username)
291 user = User.get_by_username(username)
292 user_id = user.user_id
292 user_id = user.user_id
293
293
294 rule_perm_obj = Permission.get_by_key(permission)
294 rule_perm_obj = Permission.get_by_key(permission)
295
295
296 write_perm = None
296 write_perm = None
297
297
298 # add new entry, based on existing perm entry
298 # add new entry, based on existing perm entry
299 perm = UserRepoToPerm.query() \
299 perm = UserRepoToPerm.query() \
300 .filter(UserRepoToPerm.repository_id == repo_id) \
300 .filter(UserRepoToPerm.repository_id == repo_id) \
301 .filter(UserRepoToPerm.user_id == user_id) \
301 .filter(UserRepoToPerm.user_id == user_id) \
302 .first()
302 .first()
303
303
304 if not perm:
304 if not perm:
305 # such user isn't defined in Permissions for repository
305 # such user isn't defined in Permissions for repository
306 # we now on-the-fly add new permission
306 # we now on-the-fly add new permission
307
307
308 write_perm = UserRepoToPerm()
308 write_perm = UserRepoToPerm()
309 write_perm.permission = Permission.get_by_key('repository.write')
309 write_perm.permission = Permission.get_by_key('repository.write')
310 write_perm.repository_id = repo_id
310 write_perm.repository_id = repo_id
311 write_perm.user_id = user_id
311 write_perm.user_id = user_id
312 Session().add(write_perm)
312 Session().add(write_perm)
313 Session().flush()
313 Session().flush()
314
314
315 perm = write_perm
315 perm = write_perm
316
316
317 rule = UserToRepoBranchPermission()
317 rule = UserToRepoBranchPermission()
318 rule.rule_to_perm_id = perm.repo_to_perm_id
318 rule.rule_to_perm_id = perm.repo_to_perm_id
319 rule.branch_pattern = pattern
319 rule.branch_pattern = pattern
320 rule.rule_order = 10
320 rule.rule_order = 10
321 rule.permission = rule_perm_obj
321 rule.permission = rule_perm_obj
322 rule.repository_id = repo_id
322 rule.repository_id = repo_id
323 Session().add(rule)
323 Session().add(rule)
324 Session().commit()
324 Session().commit()
325
325
326 global rule, write_perm
326 global rule, write_perm
327
327
328 return rule
328 return rule
329
329
330 @request.addfinalizer
330 @request.addfinalizer
331 def cleanup():
331 def cleanup():
332 if rule:
332 if rule:
333 Session().delete(rule)
333 Session().delete(rule)
334 Session().commit()
334 Session().commit()
335 if write_perm:
335 if write_perm:
336 Session().delete(write_perm)
336 Session().delete(write_perm)
337 Session().commit()
337 Session().commit()
338
338
339 return _branch_permissions_setter
339 return _branch_permissions_setter
340
340
341
341
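The decorator change running through the hunks above is purely stylistic: pytest registers a fixture whether the decorator is written as a bare name or as a call, and the call form simply leaves room for arguments such as scope to be added later. A minimal sketch for reference, assuming nothing beyond pytest itself (the repo_stub fixture and its return value are illustrative, not part of the code base):

import pytest

# The explicit call form used throughout the changeset; equivalent to a bare
# @pytest.fixture, but arguments such as scope="module" can be added later
# without changing the decorator style.
@pytest.fixture()
def repo_stub():
    # illustrative stand-in; the real fixtures above return repository objects
    return {'name': 'stub-repo'}


def test_repo_stub_name(repo_stub):
    assert repo_stub['name'] == 'stub-repo'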