tests: fixed tests for archivals
super-admin
r5150:1fa672f5 default
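
The fix this diff applies to the archival tests: `get_archive_name` is now called with `backend.repo_id` as its first argument (ahead of the repository name), and the expected `Content-Disposition` / `Content-Type` headers are built with f-strings and checked against `list(response.headers.items())`. A minimal sketch of the new call shape follows; the helper signature and import path are inferred from this diff only, and the literal values stand in for the pytest `backend` fixture and the `settings.ARCHIVE_SPECS` loop variables used in `TestRepositoryArchival`:

    # Sketch only: mirrors the updated call in test_archival below. Signature
    # inferred from this diff, not verified against the live RhodeCode API.
    from rhodecode.apps.repository.views.repo_files import get_archive_name, get_path_sha

    # Placeholder values standing in for the pytest fixtures / loop variables.
    repo_id, repo_name = 1, 'vcs_test_hg'
    commit_short_id, extension = 'abcdef123456', '.tar.gz'

    path_sha = get_path_sha('/')  # '/' = archive the whole repository tree
    filename = get_archive_name(
        repo_id,                  # new leading argument introduced by this change
        repo_name,
        commit_sha=commit_short_id,
        ext=extension,
        path_sha=path_sha,
    )
    expected_disposition = f'attachment; filename={filename}'
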
@@ -1,1125 +1,1125 @@
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21
21
22 import mock
22 import mock
23 import pytest
23 import pytest
24 from collections import OrderedDict
24
25
25 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
26 from rhodecode.apps.repository.views.repo_files import RepoFilesView, get_archive_name, get_path_sha
27 from rhodecode.apps.repository.views.repo_files import RepoFilesView, get_archive_name, get_path_sha
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib import helpers as h
28 from collections import OrderedDict
29 from rhodecode.lib.ext_json import json
29 from rhodecode.lib.ext_json import json
30 from rhodecode.lib.str_utils import safe_str
30 from rhodecode.lib.str_utils import safe_str
31 from rhodecode.lib.vcs import nodes
31 from rhodecode.lib.vcs import nodes
32 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.model.db import Session, Repository
32
34
33 from rhodecode.lib.vcs.conf import settings
34 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.model.db import Session
37
37
38 fixture = Fixture()
38 fixture = Fixture()
39
39
40
40
41 def get_node_history(backend_type):
41 def get_node_history(backend_type):
42 return {
42 return {
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 }[backend_type]
46 }[backend_type]
47
47
48
48
49 def route_path(name, params=None, **kwargs):
49 def route_path(name, params=None, **kwargs):
50 import urllib.request
50 import urllib.request
51 import urllib.parse
51 import urllib.parse
52 import urllib.error
52 import urllib.error
53
53
54 base_url = {
54 base_url = {
55 'repo_summary': '/{repo_name}',
55 'repo_summary': '/{repo_name}',
56 'repo_archivefile': '/{repo_name}/archive/{fname}',
56 'repo_archivefile': '/{repo_name}/archive/{fname}',
57 'repo_files_diff': '/{repo_name}/diff/{f_path}',
57 'repo_files_diff': '/{repo_name}/diff/{f_path}',
58 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
58 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
59 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
59 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
60 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
60 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
61 'repo_files:default_commit': '/{repo_name}/files',
61 'repo_files:default_commit': '/{repo_name}/files',
62 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
62 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
63 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
63 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
64 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
64 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
65 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
65 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
66 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
66 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
67 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
67 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
68 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
68 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
69 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
69 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
70 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
70 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
71 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
71 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
72 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
72 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
73 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
73 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
74 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
74 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
75 'repo_files_upload_file': '/{repo_name}/upload_file/{commit_id}/{f_path}',
75 'repo_files_upload_file': '/{repo_name}/upload_file/{commit_id}/{f_path}',
76 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
76 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
77 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
77 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
78 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
78 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
79 }[name].format(**kwargs)
79 }[name].format(**kwargs)
80
80
81 if params:
81 if params:
82 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
82 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
83 return base_url
83 return base_url
84
84
85
85
86 def assert_files_in_response(response, files, params):
86 def assert_files_in_response(response, files, params):
87 template = (
87 template = (
88 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
88 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
89 _assert_items_in_response(response, files, template, params)
89 _assert_items_in_response(response, files, template, params)
90
90
91
91
92 def assert_dirs_in_response(response, dirs, params):
92 def assert_dirs_in_response(response, dirs, params):
93 template = (
93 template = (
94 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
94 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
95 _assert_items_in_response(response, dirs, template, params)
95 _assert_items_in_response(response, dirs, template, params)
96
96
97
97
98 def _assert_items_in_response(response, items, template, params):
98 def _assert_items_in_response(response, items, template, params):
99 for item in items:
99 for item in items:
100 item_params = {'name': item}
100 item_params = {'name': item}
101 item_params.update(params)
101 item_params.update(params)
102 response.mustcontain(template % item_params)
102 response.mustcontain(template % item_params)
103
103
104
104
105 def assert_timeago_in_response(response, items, params):
105 def assert_timeago_in_response(response, items, params):
106 for item in items:
106 for item in items:
107 response.mustcontain(h.age_component(params['date']))
107 response.mustcontain(h.age_component(params['date']))
108
108
109
109
110 @pytest.mark.usefixtures("app")
110 @pytest.mark.usefixtures("app")
111 class TestFilesViews(object):
111 class TestFilesViews(object):
112
112
113 def test_show_files(self, backend):
113 def test_show_files(self, backend):
114 response = self.app.get(
114 response = self.app.get(
115 route_path('repo_files',
115 route_path('repo_files',
116 repo_name=backend.repo_name,
116 repo_name=backend.repo_name,
117 commit_id='tip', f_path='/'))
117 commit_id='tip', f_path='/'))
118 commit = backend.repo.get_commit()
118 commit = backend.repo.get_commit()
119
119
120 params = {
120 params = {
121 'repo_name': backend.repo_name,
121 'repo_name': backend.repo_name,
122 'commit_id': commit.raw_id,
122 'commit_id': commit.raw_id,
123 'date': commit.date
123 'date': commit.date
124 }
124 }
125 assert_dirs_in_response(response, ['docs', 'vcs'], params)
125 assert_dirs_in_response(response, ['docs', 'vcs'], params)
126 files = [
126 files = [
127 '.gitignore',
127 '.gitignore',
128 '.hgignore',
128 '.hgignore',
129 '.hgtags',
129 '.hgtags',
130 # TODO: missing in Git
130 # TODO: missing in Git
131 # '.travis.yml',
131 # '.travis.yml',
132 'MANIFEST.in',
132 'MANIFEST.in',
133 'README.rst',
133 'README.rst',
134 # TODO: File is missing in svn repository
134 # TODO: File is missing in svn repository
135 # 'run_test_and_report.sh',
135 # 'run_test_and_report.sh',
136 'setup.cfg',
136 'setup.cfg',
137 'setup.py',
137 'setup.py',
138 'test_and_report.sh',
138 'test_and_report.sh',
139 'tox.ini',
139 'tox.ini',
140 ]
140 ]
141 assert_files_in_response(response, files, params)
141 assert_files_in_response(response, files, params)
142 assert_timeago_in_response(response, files, params)
142 assert_timeago_in_response(response, files, params)
143
143
144 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
144 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
145 repo = backend_hg['subrepos']
145 repo = backend_hg['subrepos']
146 response = self.app.get(
146 response = self.app.get(
147 route_path('repo_files',
147 route_path('repo_files',
148 repo_name=repo.repo_name,
148 repo_name=repo.repo_name,
149 commit_id='tip', f_path='/'))
149 commit_id='tip', f_path='/'))
150 assert_response = response.assert_response()
150 assert_response = response.assert_response()
151 assert_response.contains_one_link(
151 assert_response.contains_one_link(
152 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
152 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
153
153
154 def test_show_files_links_submodules_with_absolute_url_subpaths(
154 def test_show_files_links_submodules_with_absolute_url_subpaths(
155 self, backend_hg):
155 self, backend_hg):
156 repo = backend_hg['subrepos']
156 repo = backend_hg['subrepos']
157 response = self.app.get(
157 response = self.app.get(
158 route_path('repo_files',
158 route_path('repo_files',
159 repo_name=repo.repo_name,
159 repo_name=repo.repo_name,
160 commit_id='tip', f_path='/'))
160 commit_id='tip', f_path='/'))
161 assert_response = response.assert_response()
161 assert_response = response.assert_response()
162 assert_response.contains_one_link(
162 assert_response.contains_one_link(
163 'subpaths-path @ 000000000000',
163 'subpaths-path @ 000000000000',
164 'http://sub-base.example.com/subpaths-path')
164 'http://sub-base.example.com/subpaths-path')
165
165
166 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
167 def test_files_menu(self, backend):
167 def test_files_menu(self, backend):
168 new_branch = "temp_branch_name"
168 new_branch = "temp_branch_name"
169 commits = [
169 commits = [
170 {'message': 'a'},
170 {'message': 'a'},
171 {'message': 'b', 'branch': new_branch}
171 {'message': 'b', 'branch': new_branch}
172 ]
172 ]
173 backend.create_repo(commits)
173 backend.create_repo(commits)
174 backend.repo.landing_rev = "branch:%s" % new_branch
174 backend.repo.landing_rev = f"branch:{new_branch}"
175 Session().commit()
175 Session().commit()
176
176
177 # get response based on tip and not new commit
177 # get response based on tip and not new commit
178 response = self.app.get(
178 response = self.app.get(
179 route_path('repo_files',
179 route_path('repo_files',
180 repo_name=backend.repo_name,
180 repo_name=backend.repo_name,
181 commit_id='tip', f_path='/'))
181 commit_id='tip', f_path='/'))
182
182
183 # make sure Files menu url is not tip but new commit
183 # make sure Files menu url is not tip but new commit
184 landing_rev = backend.repo.landing_ref_name
184 landing_rev = backend.repo.landing_ref_name
185 files_url = route_path('repo_files:default_path',
185 files_url = route_path('repo_files:default_path',
186 repo_name=backend.repo_name,
186 repo_name=backend.repo_name,
187 commit_id=landing_rev, params={'at': landing_rev})
187 commit_id=landing_rev, params={'at': landing_rev})
188
188
189 assert landing_rev != 'tip'
189 assert landing_rev != 'tip'
190 response.mustcontain(
190 response.mustcontain(f'<li class="active"><a class="menulink" href="{files_url}">')
191 '<li class="active"><a class="menulink" href="%s">' % files_url)
192
191
193 def test_show_files_commit(self, backend):
192 def test_show_files_commit(self, backend):
194 commit = backend.repo.get_commit(commit_idx=32)
193 commit = backend.repo.get_commit(commit_idx=32)
195
194
196 response = self.app.get(
195 response = self.app.get(
197 route_path('repo_files',
196 route_path('repo_files',
198 repo_name=backend.repo_name,
197 repo_name=backend.repo_name,
199 commit_id=commit.raw_id, f_path='/'))
198 commit_id=commit.raw_id, f_path='/'))
200
199
201 dirs = ['docs', 'tests']
200 dirs = ['docs', 'tests']
202 files = ['README.rst']
201 files = ['README.rst']
203 params = {
202 params = {
204 'repo_name': backend.repo_name,
203 'repo_name': backend.repo_name,
205 'commit_id': commit.raw_id,
204 'commit_id': commit.raw_id,
206 }
205 }
207 assert_dirs_in_response(response, dirs, params)
206 assert_dirs_in_response(response, dirs, params)
208 assert_files_in_response(response, files, params)
207 assert_files_in_response(response, files, params)
209
208
210 def test_show_files_different_branch(self, backend):
209 def test_show_files_different_branch(self, backend):
211 branches = dict(
210 branches = dict(
212 hg=(150, ['git']),
211 hg=(150, ['git']),
213 # TODO: Git test repository does not contain other branches
212 # TODO: Git test repository does not contain other branches
214 git=(633, ['master']),
213 git=(633, ['master']),
215 # TODO: Branch support in Subversion
214 # TODO: Branch support in Subversion
216 svn=(150, [])
215 svn=(150, [])
217 )
216 )
218 idx, branches = branches[backend.alias]
217 idx, branches = branches[backend.alias]
219 commit = backend.repo.get_commit(commit_idx=idx)
218 commit = backend.repo.get_commit(commit_idx=idx)
220 response = self.app.get(
219 response = self.app.get(
221 route_path('repo_files',
220 route_path('repo_files',
222 repo_name=backend.repo_name,
221 repo_name=backend.repo_name,
223 commit_id=commit.raw_id, f_path='/'))
222 commit_id=commit.raw_id, f_path='/'))
224
223
225 assert_response = response.assert_response()
224 assert_response = response.assert_response()
226 for branch in branches:
225 for branch in branches:
227 assert_response.element_contains('.tags .branchtag', branch)
226 assert_response.element_contains('.tags .branchtag', branch)
228
227
229 def test_show_files_paging(self, backend):
228 def test_show_files_paging(self, backend):
230 repo = backend.repo
229 repo = backend.repo
231 indexes = [73, 92, 109, 1, 0]
230 indexes = [73, 92, 109, 1, 0]
232 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
231 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
233 for rev in indexes]
232 for rev in indexes]
234
233
235 for idx in idx_map:
234 for idx in idx_map:
236 response = self.app.get(
235 response = self.app.get(
237 route_path('repo_files',
236 route_path('repo_files',
238 repo_name=backend.repo_name,
237 repo_name=backend.repo_name,
239 commit_id=idx[1], f_path='/'))
238 commit_id=idx[1], f_path='/'))
240
239
241 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
240 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
242
241
243 def test_file_source(self, backend):
242 def test_file_source(self, backend):
244 commit = backend.repo.get_commit(commit_idx=167)
243 commit = backend.repo.get_commit(commit_idx=167)
245 response = self.app.get(
244 response = self.app.get(
246 route_path('repo_files',
245 route_path('repo_files',
247 repo_name=backend.repo_name,
246 repo_name=backend.repo_name,
248 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
247 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
249
248
250 msgbox = """<div class="commit">%s</div>"""
249 msgbox = """<div class="commit">%s</div>"""
251 response.mustcontain(msgbox % (commit.message, ))
250 response.mustcontain(msgbox % (commit.message, ))
252
251
253 assert_response = response.assert_response()
252 assert_response = response.assert_response()
254 if commit.branch:
253 if commit.branch:
255 assert_response.element_contains(
254 assert_response.element_contains(
256 '.tags.tags-main .branchtag', commit.branch)
255 '.tags.tags-main .branchtag', commit.branch)
257 if commit.tags:
256 if commit.tags:
258 for tag in commit.tags:
257 for tag in commit.tags:
259 assert_response.element_contains('.tags.tags-main .tagtag', tag)
258 assert_response.element_contains('.tags.tags-main .tagtag', tag)
260
259
261 def test_file_source_annotated(self, backend):
260 def test_file_source_annotated(self, backend):
262 response = self.app.get(
261 response = self.app.get(
263 route_path('repo_files:annotated',
262 route_path('repo_files:annotated',
264 repo_name=backend.repo_name,
263 repo_name=backend.repo_name,
265 commit_id='tip', f_path='vcs/nodes.py'))
264 commit_id='tip', f_path='vcs/nodes.py'))
266 expected_commits = {
265 expected_commits = {
267 'hg': 'r356',
266 'hg': 'r356',
268 'git': 'r345',
267 'git': 'r345',
269 'svn': 'r208',
268 'svn': 'r208',
270 }
269 }
271 response.mustcontain(expected_commits[backend.alias])
270 response.mustcontain(expected_commits[backend.alias])
272
271
273 def test_file_source_authors(self, backend):
272 def test_file_source_authors(self, backend):
274 response = self.app.get(
273 response = self.app.get(
275 route_path('repo_file_authors',
274 route_path('repo_file_authors',
276 repo_name=backend.repo_name,
275 repo_name=backend.repo_name,
277 commit_id='tip', f_path='vcs/nodes.py'))
276 commit_id='tip', f_path='vcs/nodes.py'))
278 expected_authors = {
277 expected_authors = {
279 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
278 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
280 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
279 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
281 'svn': ('marcin', 'lukasz'),
280 'svn': ('marcin', 'lukasz'),
282 }
281 }
283
282
284 for author in expected_authors[backend.alias]:
283 for author in expected_authors[backend.alias]:
285 response.mustcontain(author)
284 response.mustcontain(author)
286
285
287 def test_file_source_authors_with_annotation(self, backend):
286 def test_file_source_authors_with_annotation(self, backend):
288 response = self.app.get(
287 response = self.app.get(
289 route_path('repo_file_authors',
288 route_path('repo_file_authors',
290 repo_name=backend.repo_name,
289 repo_name=backend.repo_name,
291 commit_id='tip', f_path='vcs/nodes.py',
290 commit_id='tip', f_path='vcs/nodes.py',
292 params=dict(annotate=1)))
291 params=dict(annotate=1)))
293 expected_authors = {
292 expected_authors = {
294 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
293 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
295 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
294 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
296 'svn': ('marcin', 'lukasz'),
295 'svn': ('marcin', 'lukasz'),
297 }
296 }
298
297
299 for author in expected_authors[backend.alias]:
298 for author in expected_authors[backend.alias]:
300 response.mustcontain(author)
299 response.mustcontain(author)
301
300
302 def test_file_source_history(self, backend, xhr_header):
301 def test_file_source_history(self, backend, xhr_header):
303 response = self.app.get(
302 response = self.app.get(
304 route_path('repo_file_history',
303 route_path('repo_file_history',
305 repo_name=backend.repo_name,
304 repo_name=backend.repo_name,
306 commit_id='tip', f_path='vcs/nodes.py'),
305 commit_id='tip', f_path='vcs/nodes.py'),
307 extra_environ=xhr_header)
306 extra_environ=xhr_header)
308 assert get_node_history(backend.alias) == json.loads(response.body)
307 assert get_node_history(backend.alias) == json.loads(response.body)
309
308
310 def test_file_source_history_svn(self, backend_svn, xhr_header):
309 def test_file_source_history_svn(self, backend_svn, xhr_header):
311 simple_repo = backend_svn['svn-simple-layout']
310 simple_repo = backend_svn['svn-simple-layout']
312 response = self.app.get(
311 response = self.app.get(
313 route_path('repo_file_history',
312 route_path('repo_file_history',
314 repo_name=simple_repo.repo_name,
313 repo_name=simple_repo.repo_name,
315 commit_id='tip', f_path='trunk/example.py'),
314 commit_id='tip', f_path='trunk/example.py'),
316 extra_environ=xhr_header)
315 extra_environ=xhr_header)
317
316
318 expected_data = json.loads(
317 expected_data = json.loads(
319 fixture.load_resource('svn_node_history_branches.json'))
318 fixture.load_resource('svn_node_history_branches.json'))
320
319
321 assert expected_data == response.json
320 assert expected_data == response.json
322
321
323 def test_file_source_history_with_annotation(self, backend, xhr_header):
322 def test_file_source_history_with_annotation(self, backend, xhr_header):
324 response = self.app.get(
323 response = self.app.get(
325 route_path('repo_file_history',
324 route_path('repo_file_history',
326 repo_name=backend.repo_name,
325 repo_name=backend.repo_name,
327 commit_id='tip', f_path='vcs/nodes.py',
326 commit_id='tip', f_path='vcs/nodes.py',
328 params=dict(annotate=1)),
327 params=dict(annotate=1)),
329
328
330 extra_environ=xhr_header)
329 extra_environ=xhr_header)
331 assert get_node_history(backend.alias) == json.loads(response.body)
330 assert get_node_history(backend.alias) == json.loads(response.body)
332
331
333 def test_tree_search_top_level(self, backend, xhr_header):
332 def test_tree_search_top_level(self, backend, xhr_header):
334 commit = backend.repo.get_commit(commit_idx=173)
333 commit = backend.repo.get_commit(commit_idx=173)
335 response = self.app.get(
334 response = self.app.get(
336 route_path('repo_files_nodelist',
335 route_path('repo_files_nodelist',
337 repo_name=backend.repo_name,
336 repo_name=backend.repo_name,
338 commit_id=commit.raw_id, f_path='/'),
337 commit_id=commit.raw_id, f_path='/'),
339 extra_environ=xhr_header)
338 extra_environ=xhr_header)
340 assert 'nodes' in response.json
339 assert 'nodes' in response.json
341 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
340 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
342
341
343 def test_tree_search_missing_xhr(self, backend):
342 def test_tree_search_missing_xhr(self, backend):
344 self.app.get(
343 self.app.get(
345 route_path('repo_files_nodelist',
344 route_path('repo_files_nodelist',
346 repo_name=backend.repo_name,
345 repo_name=backend.repo_name,
347 commit_id='tip', f_path='/'),
346 commit_id='tip', f_path='/'),
348 status=404)
347 status=404)
349
348
350 def test_tree_search_at_path(self, backend, xhr_header):
349 def test_tree_search_at_path(self, backend, xhr_header):
351 commit = backend.repo.get_commit(commit_idx=173)
350 commit = backend.repo.get_commit(commit_idx=173)
352 response = self.app.get(
351 response = self.app.get(
353 route_path('repo_files_nodelist',
352 route_path('repo_files_nodelist',
354 repo_name=backend.repo_name,
353 repo_name=backend.repo_name,
355 commit_id=commit.raw_id, f_path='/docs'),
354 commit_id=commit.raw_id, f_path='/docs'),
356 extra_environ=xhr_header)
355 extra_environ=xhr_header)
357 assert 'nodes' in response.json
356 assert 'nodes' in response.json
358 nodes = response.json['nodes']
357 nodes = response.json['nodes']
359 assert {'name': 'docs/api', 'type': 'dir'} in nodes
358 assert {'name': 'docs/api', 'type': 'dir'} in nodes
360 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
359 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
361
360
362 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
361 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
363 commit = backend.repo.get_commit(commit_idx=173)
362 commit = backend.repo.get_commit(commit_idx=173)
364 response = self.app.get(
363 response = self.app.get(
365 route_path('repo_files_nodelist',
364 route_path('repo_files_nodelist',
366 repo_name=backend.repo_name,
365 repo_name=backend.repo_name,
367 commit_id=commit.raw_id, f_path='/docs/api'),
366 commit_id=commit.raw_id, f_path='/docs/api'),
368 extra_environ=xhr_header)
367 extra_environ=xhr_header)
369 assert 'nodes' in response.json
368 assert 'nodes' in response.json
370 nodes = response.json['nodes']
369 nodes = response.json['nodes']
371 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
370 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
372
371
373 def test_tree_search_at_path_missing_xhr(self, backend):
372 def test_tree_search_at_path_missing_xhr(self, backend):
374 self.app.get(
373 self.app.get(
375 route_path('repo_files_nodelist',
374 route_path('repo_files_nodelist',
376 repo_name=backend.repo_name,
375 repo_name=backend.repo_name,
377 commit_id='tip', f_path='/docs'),
376 commit_id='tip', f_path='/docs'),
378 status=404)
377 status=404)
379
378
380 def test_nodetree(self, backend, xhr_header):
379 def test_nodetree(self, backend, xhr_header):
381 commit = backend.repo.get_commit(commit_idx=173)
380 commit = backend.repo.get_commit(commit_idx=173)
382 response = self.app.get(
381 response = self.app.get(
383 route_path('repo_nodetree_full',
382 route_path('repo_nodetree_full',
384 repo_name=backend.repo_name,
383 repo_name=backend.repo_name,
385 commit_id=commit.raw_id, f_path='/'),
384 commit_id=commit.raw_id, f_path='/'),
386 extra_environ=xhr_header)
385 extra_environ=xhr_header)
387
386
388 assert_response = response.assert_response()
387 assert_response = response.assert_response()
389
388
390 for attr in ['data-commit-id', 'data-date', 'data-author']:
389 for attr in ['data-commit-id', 'data-date', 'data-author']:
391 elements = assert_response.get_elements('[{}]'.format(attr))
390 elements = assert_response.get_elements('[{}]'.format(attr))
392 assert len(elements) > 1
391 assert len(elements) > 1
393
392
394 for element in elements:
393 for element in elements:
395 assert element.get(attr)
394 assert element.get(attr)
396
395
397 def test_nodetree_if_file(self, backend, xhr_header):
396 def test_nodetree_if_file(self, backend, xhr_header):
398 commit = backend.repo.get_commit(commit_idx=173)
397 commit = backend.repo.get_commit(commit_idx=173)
399 response = self.app.get(
398 response = self.app.get(
400 route_path('repo_nodetree_full',
399 route_path('repo_nodetree_full',
401 repo_name=backend.repo_name,
400 repo_name=backend.repo_name,
402 commit_id=commit.raw_id, f_path='README.rst'),
401 commit_id=commit.raw_id, f_path='README.rst'),
403 extra_environ=xhr_header)
402 extra_environ=xhr_header)
404 assert response.text == ''
403 assert response.text == ''
405
404
406 def test_nodetree_wrong_path(self, backend, xhr_header):
405 def test_nodetree_wrong_path(self, backend, xhr_header):
407 commit = backend.repo.get_commit(commit_idx=173)
406 commit = backend.repo.get_commit(commit_idx=173)
408 response = self.app.get(
407 response = self.app.get(
409 route_path('repo_nodetree_full',
408 route_path('repo_nodetree_full',
410 repo_name=backend.repo_name,
409 repo_name=backend.repo_name,
411 commit_id=commit.raw_id, f_path='/dont-exist'),
410 commit_id=commit.raw_id, f_path='/dont-exist'),
412 extra_environ=xhr_header)
411 extra_environ=xhr_header)
413
412
414 err = 'error: There is no file nor ' \
413 err = 'error: There is no file nor ' \
415 'directory at the given path'
414 'directory at the given path'
416 assert err in response.text
415 assert err in response.text
417
416
418 def test_nodetree_missing_xhr(self, backend):
417 def test_nodetree_missing_xhr(self, backend):
419 self.app.get(
418 self.app.get(
420 route_path('repo_nodetree_full',
419 route_path('repo_nodetree_full',
421 repo_name=backend.repo_name,
420 repo_name=backend.repo_name,
422 commit_id='tip', f_path='/'),
421 commit_id='tip', f_path='/'),
423 status=404)
422 status=404)
424
423
425
424
426 @pytest.mark.usefixtures("app", "autologin_user")
425 @pytest.mark.usefixtures("app", "autologin_user")
427 class TestRawFileHandling(object):
426 class TestRawFileHandling(object):
428
427
429 def test_download_file(self, backend):
428 def test_download_file(self, backend):
430 commit = backend.repo.get_commit(commit_idx=173)
429 commit = backend.repo.get_commit(commit_idx=173)
431 response = self.app.get(
430 response = self.app.get(
432 route_path('repo_file_download',
431 route_path('repo_file_download',
433 repo_name=backend.repo_name,
432 repo_name=backend.repo_name,
434 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
433 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
435
434
436 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
435 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
437 assert response.content_type == "text/x-python"
436 assert response.content_type == "text/x-python"
438
437
439 def test_download_file_wrong_cs(self, backend):
438 def test_download_file_wrong_cs(self, backend):
440 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
439 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
441
440
442 response = self.app.get(
441 response = self.app.get(
443 route_path('repo_file_download',
442 route_path('repo_file_download',
444 repo_name=backend.repo_name,
443 repo_name=backend.repo_name,
445 commit_id=raw_id, f_path='vcs/nodes.svg'),
444 commit_id=raw_id, f_path='vcs/nodes.svg'),
446 status=404)
445 status=404)
447
446
448 msg = """No such commit exists for this repository"""
447 msg = """No such commit exists for this repository"""
449 response.mustcontain(msg)
448 response.mustcontain(msg)
450
449
451 def test_download_file_wrong_f_path(self, backend):
450 def test_download_file_wrong_f_path(self, backend):
452 commit = backend.repo.get_commit(commit_idx=173)
451 commit = backend.repo.get_commit(commit_idx=173)
453 f_path = 'vcs/ERRORnodes.py'
452 f_path = 'vcs/ERRORnodes.py'
454
453
455 response = self.app.get(
454 response = self.app.get(
456 route_path('repo_file_download',
455 route_path('repo_file_download',
457 repo_name=backend.repo_name,
456 repo_name=backend.repo_name,
458 commit_id=commit.raw_id, f_path=f_path),
457 commit_id=commit.raw_id, f_path=f_path),
459 status=404)
458 status=404)
460
459
461 msg = (
460 msg = (
462 "There is no file nor directory at the given path: "
461 "There is no file nor directory at the given path: "
463 "`%s` at commit %s" % (f_path, commit.short_id))
462 "`%s` at commit %s" % (f_path, commit.short_id))
464 response.mustcontain(msg)
463 response.mustcontain(msg)
465
464
466 def test_file_raw(self, backend):
465 def test_file_raw(self, backend):
467 commit = backend.repo.get_commit(commit_idx=173)
466 commit = backend.repo.get_commit(commit_idx=173)
468 response = self.app.get(
467 response = self.app.get(
469 route_path('repo_file_raw',
468 route_path('repo_file_raw',
470 repo_name=backend.repo_name,
469 repo_name=backend.repo_name,
471 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
470 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
472
471
473 assert response.content_type == "text/plain"
472 assert response.content_type == "text/plain"
474
473
475 def test_file_raw_binary(self, backend):
474 def test_file_raw_binary(self, backend):
476 commit = backend.repo.get_commit()
475 commit = backend.repo.get_commit()
477 response = self.app.get(
476 response = self.app.get(
478 route_path('repo_file_raw',
477 route_path('repo_file_raw',
479 repo_name=backend.repo_name,
478 repo_name=backend.repo_name,
480 commit_id=commit.raw_id,
479 commit_id=commit.raw_id,
481 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
480 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
482
481
483 assert response.content_disposition == 'inline'
482 assert response.content_disposition == 'inline'
484
483
485 def test_raw_file_wrong_cs(self, backend):
484 def test_raw_file_wrong_cs(self, backend):
486 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
485 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
487
486
488 response = self.app.get(
487 response = self.app.get(
489 route_path('repo_file_raw',
488 route_path('repo_file_raw',
490 repo_name=backend.repo_name,
489 repo_name=backend.repo_name,
491 commit_id=raw_id, f_path='vcs/nodes.svg'),
490 commit_id=raw_id, f_path='vcs/nodes.svg'),
492 status=404)
491 status=404)
493
492
494 msg = """No such commit exists for this repository"""
493 msg = """No such commit exists for this repository"""
495 response.mustcontain(msg)
494 response.mustcontain(msg)
496
495
497 def test_raw_wrong_f_path(self, backend):
496 def test_raw_wrong_f_path(self, backend):
498 commit = backend.repo.get_commit(commit_idx=173)
497 commit = backend.repo.get_commit(commit_idx=173)
499 f_path = 'vcs/ERRORnodes.py'
498 f_path = 'vcs/ERRORnodes.py'
500 response = self.app.get(
499 response = self.app.get(
501 route_path('repo_file_raw',
500 route_path('repo_file_raw',
502 repo_name=backend.repo_name,
501 repo_name=backend.repo_name,
503 commit_id=commit.raw_id, f_path=f_path),
502 commit_id=commit.raw_id, f_path=f_path),
504 status=404)
503 status=404)
505
504
506 msg = (
505 msg = (
507 "There is no file nor directory at the given path: "
506 "There is no file nor directory at the given path: "
508 "`%s` at commit %s" % (f_path, commit.short_id))
507 "`%s` at commit %s" % (f_path, commit.short_id))
509 response.mustcontain(msg)
508 response.mustcontain(msg)
510
509
511 def test_raw_svg_should_not_be_rendered(self, backend):
510 def test_raw_svg_should_not_be_rendered(self, backend):
512 backend.create_repo()
511 backend.create_repo()
513 backend.ensure_file(b"xss.svg")
512 backend.ensure_file(b"xss.svg")
514 response = self.app.get(
513 response = self.app.get(
515 route_path('repo_file_raw',
514 route_path('repo_file_raw',
516 repo_name=backend.repo_name,
515 repo_name=backend.repo_name,
517 commit_id='tip', f_path='xss.svg'),)
516 commit_id='tip', f_path='xss.svg'),)
518 # If the content type is image/svg+xml then it allows to render HTML
517 # If the content type is image/svg+xml then it allows to render HTML
519 # and malicious SVG.
518 # and malicious SVG.
520 assert response.content_type == "text/plain"
519 assert response.content_type == "text/plain"
521
520
522
521
523 @pytest.mark.usefixtures("app")
522 @pytest.mark.usefixtures("app")
524 class TestRepositoryArchival(object):
523 class TestRepositoryArchival(object):
525
524
526 def test_archival(self, backend):
525 def test_archival(self, backend):
527 backend.enable_downloads()
526 backend.enable_downloads()
528 commit = backend.repo.get_commit(commit_idx=173)
527 commit = backend.repo.get_commit(commit_idx=173)
528
529 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
529 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
530 path_sha = get_path_sha('/')
530 path_sha = get_path_sha('/')
531 filename = get_archive_name(backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha)
531 filename = get_archive_name(backend.repo_id, backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha)
532
532
533 fname = commit.raw_id + extension
533 fname = commit.raw_id + extension
534 response = self.app.get(
534 response = self.app.get(
535 route_path('repo_archivefile',
535 route_path('repo_archivefile',
536 repo_name=backend.repo_name,
536 repo_name=backend.repo_name,
537 fname=fname))
537 fname=fname))
538
538
539 assert response.status == '200 OK'
539 assert response.status == '200 OK'
540 headers = [
540 headers = [
541 ('Content-Disposition', 'attachment; filename=%s' % filename),
541 ('Content-Disposition', f'attachment; filename={filename}'),
542 ('Content-Type', '%s' % content_type),
542 ('Content-Type', content_type),
543 ]
543 ]
544
544
545 for header in headers:
545 for header in headers:
546 assert header in response.headers.items()
546 assert header in list(response.headers.items())
547
547
548 def test_archival_no_hash(self, backend):
548 def test_archival_no_hash(self, backend):
549 backend.enable_downloads()
549 backend.enable_downloads()
550 commit = backend.repo.get_commit(commit_idx=173)
550 commit = backend.repo.get_commit(commit_idx=173)
551 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
551 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
552 path_sha = get_path_sha('/')
552 path_sha = get_path_sha('/')
553 filename = get_archive_name(backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha, with_hash=False)
553 filename = get_archive_name(backend.repo_id, backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha, with_hash=False)
554
554
555 fname = commit.raw_id + extension
555 fname = commit.raw_id + extension
556 response = self.app.get(
556 response = self.app.get(
557 route_path('repo_archivefile',
557 route_path('repo_archivefile',
558 repo_name=backend.repo_name,
558 repo_name=backend.repo_name,
559 fname=fname, params={'with_hash': 0}))
559 fname=fname, params={'with_hash': 0}))
560
560
561 assert response.status == '200 OK'
561 assert response.status == '200 OK'
562 headers = [
562 headers = [
563 ('Content-Disposition', 'attachment; filename=%s' % filename),
563 ('Content-Disposition', f'attachment; filename={filename}'),
564 ('Content-Type', '%s' % content_type),
564 ('Content-Type', content_type),
565 ]
565 ]
566
566
567 for header in headers:
567 for header in headers:
568 assert header in list(response.headers.items())
568 assert header in list(response.headers.items())
569
569
570 def test_archival_at_path(self, backend):
570 def test_archival_at_path(self, backend):
571 backend.enable_downloads()
571 backend.enable_downloads()
572 commit = backend.repo.get_commit(commit_idx=190)
572 commit = backend.repo.get_commit(commit_idx=190)
573 at_path = 'vcs'
573 at_path = 'vcs'
574
574
575 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
575 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
576 path_sha = get_path_sha(at_path)
576 path_sha = get_path_sha(at_path)
577 filename = get_archive_name(backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha)
577 filename = get_archive_name(backend.repo_id, backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha)
578
578
579 fname = commit.raw_id + extension
579 fname = commit.raw_id + extension
580 response = self.app.get(
580 response = self.app.get(
581 route_path('repo_archivefile',
581 route_path('repo_archivefile',
582 repo_name=backend.repo_name,
582 repo_name=backend.repo_name,
583 fname=fname, params={'at_path': at_path}))
583 fname=fname, params={'at_path': at_path}))
584
584
585 assert response.status == '200 OK'
585 assert response.status == '200 OK'
586 headers = [
586 headers = [
587 ('Content-Disposition', 'attachment; filename=%s' % filename),
587 ('Content-Disposition', f'attachment; filename={filename}'),
588 ('Content-Type', '%s' % content_type),
588 ('Content-Type', content_type),
589 ]
589 ]
590
590
591 for header in headers:
591 for header in headers:
592 assert header in list(response.headers.items())
592 assert header in list(response.headers.items())
593
593
594 @pytest.mark.parametrize('arch_ext',[
594 @pytest.mark.parametrize('arch_ext',[
595 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
595 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
596 def test_archival_wrong_ext(self, backend, arch_ext):
596 def test_archival_wrong_ext(self, backend, arch_ext):
597 backend.enable_downloads()
597 backend.enable_downloads()
598 commit = backend.repo.get_commit(commit_idx=173)
598 commit = backend.repo.get_commit(commit_idx=173)
599
599
600 fname = commit.raw_id + '.' + arch_ext
600 fname = commit.raw_id + '.' + arch_ext
601
601
602 response = self.app.get(
602 response = self.app.get(
603 route_path('repo_archivefile',
603 route_path('repo_archivefile',
604 repo_name=backend.repo_name,
604 repo_name=backend.repo_name,
605 fname=fname))
605 fname=fname))
606 response.mustcontain(
606 response.mustcontain(
607 'Unknown archive type for: `{}`'.format(fname))
607 'Unknown archive type for: `{}`'.format(fname))
608
608
609 @pytest.mark.parametrize('commit_id', [
609 @pytest.mark.parametrize('commit_id', [
610 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
610 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
611 def test_archival_wrong_commit_id(self, backend, commit_id):
611 def test_archival_wrong_commit_id(self, backend, commit_id):
612 backend.enable_downloads()
612 backend.enable_downloads()
613 fname = '%s.zip' % commit_id
613 fname = f'{commit_id}.zip'
614
614
615 response = self.app.get(
615 response = self.app.get(
616 route_path('repo_archivefile',
616 route_path('repo_archivefile',
617 repo_name=backend.repo_name,
617 repo_name=backend.repo_name,
618 fname=fname))
618 fname=fname))
619 response.mustcontain('Unknown commit_id')
619 response.mustcontain('Unknown commit_id')
620
620
621
621
622 @pytest.mark.usefixtures("app")
622 @pytest.mark.usefixtures("app")
623 class TestFilesDiff(object):
623 class TestFilesDiff(object):
624
624
625 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
625 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
626 def test_file_full_diff(self, backend, diff):
626 def test_file_full_diff(self, backend, diff):
627 commit1 = backend.repo.get_commit(commit_idx=-1)
627 commit1 = backend.repo.get_commit(commit_idx=-1)
628 commit2 = backend.repo.get_commit(commit_idx=-2)
628 commit2 = backend.repo.get_commit(commit_idx=-2)
629
629
630 response = self.app.get(
630 response = self.app.get(
631 route_path('repo_files_diff',
631 route_path('repo_files_diff',
632 repo_name=backend.repo_name,
632 repo_name=backend.repo_name,
633 f_path='README'),
633 f_path='README'),
634 params={
634 params={
635 'diff1': commit2.raw_id,
635 'diff1': commit2.raw_id,
636 'diff2': commit1.raw_id,
636 'diff2': commit1.raw_id,
637 'fulldiff': '1',
637 'fulldiff': '1',
638 'diff': diff,
638 'diff': diff,
639 })
639 })
640
640
641 if diff == 'diff':
641 if diff == 'diff':
642 # use redirect since this is OLD view redirecting to compare page
642 # use redirect since this is OLD view redirecting to compare page
643 response = response.follow()
643 response = response.follow()
644
644
645 # It's a symlink to README.rst
645 # It's a symlink to README.rst
646 response.mustcontain('README.rst')
646 response.mustcontain('README.rst')
647 response.mustcontain('No newline at end of file')
647 response.mustcontain('No newline at end of file')
648
648
649 def test_file_binary_diff(self, backend):
649 def test_file_binary_diff(self, backend):
650 commits = [
650 commits = [
651 {'message': 'First commit'},
651 {'message': 'First commit'},
652 {'message': 'Commit with binary',
652 {'message': 'Commit with binary',
653 'added': [nodes.FileNode(b'file.bin', content='\0BINARY\0')]},
653 'added': [nodes.FileNode(b'file.bin', content='\0BINARY\0')]},
654 ]
654 ]
655 repo = backend.create_repo(commits=commits)
655 repo = backend.create_repo(commits=commits)
656
656
657 response = self.app.get(
657 response = self.app.get(
658 route_path('repo_files_diff',
658 route_path('repo_files_diff',
659 repo_name=backend.repo_name,
659 repo_name=backend.repo_name,
660 f_path='file.bin'),
660 f_path='file.bin'),
661 params={
661 params={
662 'diff1': repo.get_commit(commit_idx=0).raw_id,
662 'diff1': repo.get_commit(commit_idx=0).raw_id,
663 'diff2': repo.get_commit(commit_idx=1).raw_id,
663 'diff2': repo.get_commit(commit_idx=1).raw_id,
664 'fulldiff': '1',
664 'fulldiff': '1',
665 'diff': 'diff',
665 'diff': 'diff',
666 })
666 })
667 # use redirect since this is OLD view redirecting to compare page
667 # use redirect since this is OLD view redirecting to compare page
668 response = response.follow()
668 response = response.follow()
669 response.mustcontain('Collapse 1 commit')
669 response.mustcontain('Collapse 1 commit')
670 file_changes = (1, 0, 0)
670 file_changes = (1, 0, 0)
671
671
672 compare_page = ComparePage(response)
672 compare_page = ComparePage(response)
673 compare_page.contains_change_summary(*file_changes)
673 compare_page.contains_change_summary(*file_changes)
674
674
675 if backend.alias == 'svn':
675 if backend.alias == 'svn':
676 response.mustcontain('new file 10644')
676 response.mustcontain('new file 10644')
677 # TODO(marcink): SVN doesn't yet detect binary changes
677 # TODO(marcink): SVN doesn't yet detect binary changes
678 else:
678 else:
679 response.mustcontain('new file 100644')
679 response.mustcontain('new file 100644')
680 response.mustcontain('binary diff hidden')
680 response.mustcontain('binary diff hidden')
681
681
682 def test_diff_2way(self, backend):
682 def test_diff_2way(self, backend):
683 commit1 = backend.repo.get_commit(commit_idx=-1)
683 commit1 = backend.repo.get_commit(commit_idx=-1)
684 commit2 = backend.repo.get_commit(commit_idx=-2)
684 commit2 = backend.repo.get_commit(commit_idx=-2)
685 response = self.app.get(
685 response = self.app.get(
686 route_path('repo_files_diff_2way_redirect',
686 route_path('repo_files_diff_2way_redirect',
687 repo_name=backend.repo_name,
687 repo_name=backend.repo_name,
688 f_path='README'),
688 f_path='README'),
689 params={
689 params={
690 'diff1': commit2.raw_id,
690 'diff1': commit2.raw_id,
691 'diff2': commit1.raw_id,
691 'diff2': commit1.raw_id,
692 })
692 })
693 # use redirect since this is OLD view redirecting to compare page
693 # use redirect since this is OLD view redirecting to compare page
694 response = response.follow()
694 response = response.follow()
695
695
696 # It's a symlink to README.rst
696 # It's a symlink to README.rst
697 response.mustcontain('README.rst')
697 response.mustcontain('README.rst')
698 response.mustcontain('No newline at end of file')
698 response.mustcontain('No newline at end of file')
699
699
700 def test_requires_one_commit_id(self, backend, autologin_user):
700 def test_requires_one_commit_id(self, backend, autologin_user):
701 response = self.app.get(
701 response = self.app.get(
702 route_path('repo_files_diff',
702 route_path('repo_files_diff',
703 repo_name=backend.repo_name,
703 repo_name=backend.repo_name,
704 f_path='README.rst'),
704 f_path='README.rst'),
705 status=400)
705 status=400)
706 response.mustcontain(
706 response.mustcontain(
707 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
707 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
708
708
709 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
709 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
710 repo = vcsbackend.repo
710 repo = vcsbackend.repo
711 response = self.app.get(
711 response = self.app.get(
712 route_path('repo_files_diff',
712 route_path('repo_files_diff',
713 repo_name=repo.name,
713 repo_name=repo.name,
714 f_path='does-not-exist-in-any-commit'),
714 f_path='does-not-exist-in-any-commit'),
715 params={
715 params={
716 'diff1': repo[0].raw_id,
716 'diff1': repo[0].raw_id,
717 'diff2': repo[1].raw_id
717 'diff2': repo[1].raw_id
718 })
718 })
719
719
720 response = response.follow()
720 response = response.follow()
721 response.mustcontain('No files')
721 response.mustcontain('No files')
722
722
723 def test_returns_redirect_if_file_not_changed(self, backend):
723 def test_returns_redirect_if_file_not_changed(self, backend):
724 commit = backend.repo.get_commit(commit_idx=-1)
724 commit = backend.repo.get_commit(commit_idx=-1)
725 response = self.app.get(
725 response = self.app.get(
726 route_path('repo_files_diff_2way_redirect',
726 route_path('repo_files_diff_2way_redirect',
727 repo_name=backend.repo_name,
727 repo_name=backend.repo_name,
728 f_path='README'),
728 f_path='README'),
729 params={
729 params={
730 'diff1': commit.raw_id,
730 'diff1': commit.raw_id,
731 'diff2': commit.raw_id,
731 'diff2': commit.raw_id,
732 })
732 })
733
733
734 response = response.follow()
734 response = response.follow()
735 response.mustcontain('No files')
735 response.mustcontain('No files')
736 response.mustcontain('No commits in this compare')
736 response.mustcontain('No commits in this compare')
737
737
738 def test_supports_diff_to_different_path_svn(self, backend_svn):
738 def test_supports_diff_to_different_path_svn(self, backend_svn):
739 #TODO: check this case
739 #TODO: check this case
740 return
740 return
741
741
742 repo = backend_svn['svn-simple-layout'].scm_instance()
742 repo = backend_svn['svn-simple-layout'].scm_instance()
743 commit_id_1 = '24'
743 commit_id_1 = '24'
744 commit_id_2 = '26'
744 commit_id_2 = '26'
745
745
746 response = self.app.get(
746 response = self.app.get(
747 route_path('repo_files_diff',
747 route_path('repo_files_diff',
748 repo_name=backend_svn.repo_name,
748 repo_name=backend_svn.repo_name,
749 f_path='trunk/example.py'),
749 f_path='trunk/example.py'),
750 params={
750 params={
751 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
751 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
752 'diff2': commit_id_2,
752 'diff2': commit_id_2,
753 })
753 })
754
754
755 response = response.follow()
755 response = response.follow()
756 response.mustcontain(
756 response.mustcontain(
757 # diff contains this
757 # diff contains this
758 "Will print out a useful message on invocation.")
758 "Will print out a useful message on invocation.")
759
759
760 # Note: Expecting that we indicate the user what's being compared
760 # Note: Expecting that we indicate the user what's being compared
761 response.mustcontain("trunk/example.py")
761 response.mustcontain("trunk/example.py")
762 response.mustcontain("tags/v0.2/example.py")
762 response.mustcontain("tags/v0.2/example.py")
763
763
764 def test_show_rev_redirects_to_svn_path(self, backend_svn):
764 def test_show_rev_redirects_to_svn_path(self, backend_svn):
765 #TODO: check this case
765 #TODO: check this case
766 return
766 return
767
767
768 repo = backend_svn['svn-simple-layout'].scm_instance()
768 repo = backend_svn['svn-simple-layout'].scm_instance()
769 commit_id = repo[-1].raw_id
769 commit_id = repo[-1].raw_id
770
770
771 response = self.app.get(
771 response = self.app.get(
772 route_path('repo_files_diff',
772 route_path('repo_files_diff',
773 repo_name=backend_svn.repo_name,
773 repo_name=backend_svn.repo_name,
774 f_path='trunk/example.py'),
774 f_path='trunk/example.py'),
775 params={
775 params={
776 'diff1': 'branches/argparse/example.py@' + commit_id,
776 'diff1': 'branches/argparse/example.py@' + commit_id,
777 'diff2': commit_id,
777 'diff2': commit_id,
778 },
778 },
779 status=302)
779 status=302)
780 response = response.follow()
780 response = response.follow()
781 assert response.headers['Location'].endswith(
781 assert response.headers['Location'].endswith(
782 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
782 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
783
783
784 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
784 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
785 #TODO: check this case
785 #TODO: check this case
786 return
786 return
787
787
788 repo = backend_svn['svn-simple-layout'].scm_instance()
788 repo = backend_svn['svn-simple-layout'].scm_instance()
789 commit_id = repo[-1].raw_id
789 commit_id = repo[-1].raw_id
790 response = self.app.get(
790 response = self.app.get(
791 route_path('repo_files_diff',
791 route_path('repo_files_diff',
792 repo_name=backend_svn.repo_name,
792 repo_name=backend_svn.repo_name,
793 f_path='trunk/example.py'),
793 f_path='trunk/example.py'),
794 params={
794 params={
795 'diff1': 'branches/argparse/example.py@' + commit_id,
795 'diff1': 'branches/argparse/example.py@' + commit_id,
796 'diff2': commit_id,
796 'diff2': commit_id,
797 'show_rev': 'Show at Revision',
797 'show_rev': 'Show at Revision',
798 'annotate': 'true',
798 'annotate': 'true',
799 },
799 },
800 status=302)
800 status=302)
801 response = response.follow()
801 response = response.follow()
802 assert response.headers['Location'].endswith(
802 assert response.headers['Location'].endswith(
803 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
803 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
804
804
805
805
806 @pytest.mark.usefixtures("app", "autologin_user")
806 @pytest.mark.usefixtures("app", "autologin_user")
807 class TestModifyFilesWithWebInterface(object):
807 class TestModifyFilesWithWebInterface(object):
808
808
809 def test_add_file_view(self, backend):
809 def test_add_file_view(self, backend):
810 self.app.get(
810 self.app.get(
811 route_path('repo_files_add_file',
811 route_path('repo_files_add_file',
812 repo_name=backend.repo_name,
812 repo_name=backend.repo_name,
813 commit_id='tip', f_path='/')
813 commit_id='tip', f_path='/')
814 )
814 )
815
815
816 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
816 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
817 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
817 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
818 backend.create_repo()
818 backend.create_repo()
819 filename = 'init.py'
819 filename = 'init.py'
820 response = self.app.post(
820 response = self.app.post(
821 route_path('repo_files_create_file',
821 route_path('repo_files_create_file',
822 repo_name=backend.repo_name,
822 repo_name=backend.repo_name,
823 commit_id='tip', f_path='/'),
823 commit_id='tip', f_path='/'),
824 params={
824 params={
825 'content': "",
825 'content': "",
826 'filename': filename,
826 'filename': filename,
827 'csrf_token': csrf_token,
827 'csrf_token': csrf_token,
828 },
828 },
829 status=302)
829 status=302)
830 expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename))
830 expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename))
831 assert_session_flash(response, expected_msg)
831 assert_session_flash(response, expected_msg)
832
832
833 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
833 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
834 commit_id = backend.repo.get_commit().raw_id
834 commit_id = backend.repo.get_commit().raw_id
835 response = self.app.post(
835 response = self.app.post(
836 route_path('repo_files_create_file',
836 route_path('repo_files_create_file',
837 repo_name=backend.repo_name,
837 repo_name=backend.repo_name,
838 commit_id=commit_id, f_path='/'),
838 commit_id=commit_id, f_path='/'),
839 params={
839 params={
840 'content': "foo",
840 'content': "foo",
841 'csrf_token': csrf_token,
841 'csrf_token': csrf_token,
842 },
842 },
843 status=302)
843 status=302)
844
844
845 assert_session_flash(response, 'No filename specified')
845 assert_session_flash(response, 'No filename specified')
846
846
847 def test_add_file_into_repo_errors_and_no_commits(
847 def test_add_file_into_repo_errors_and_no_commits(
848 self, backend, csrf_token):
848 self, backend, csrf_token):
849 repo = backend.create_repo()
849 repo = backend.create_repo()
850 # Create a file with no filename; an error should be shown even though
850 # Create a file with no filename; an error should be shown even though
851 # the repo has no commits yet
851 # the repo has no commits yet
852 response = self.app.post(
852 response = self.app.post(
853 route_path('repo_files_create_file',
853 route_path('repo_files_create_file',
854 repo_name=repo.repo_name,
854 repo_name=repo.repo_name,
855 commit_id='tip', f_path='/'),
855 commit_id='tip', f_path='/'),
856 params={
856 params={
857 'content': "foo",
857 'content': "foo",
858 'csrf_token': csrf_token,
858 'csrf_token': csrf_token,
859 },
859 },
860 status=302)
860 status=302)
861
861
862 assert_session_flash(response, 'No filename specified')
862 assert_session_flash(response, 'No filename specified')
863
863
864 # Not allowed, redirect to the summary
864 # Not allowed, redirect to the summary
865 redirected = response.follow()
865 redirected = response.follow()
866 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
866 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
867
867
868 # As there are no commits, the summary page is displayed with the error about
868 # As there are no commits, the summary page is displayed with the error about
869 # creating a file with no filename
869 # creating a file with no filename
870
870
871 assert redirected.request.path == summary_url
871 assert redirected.request.path == summary_url
872
872
873 @pytest.mark.parametrize("filename, clean_filename", [
873 @pytest.mark.parametrize("filename, clean_filename", [
874 ('/abs/foo', 'abs/foo'),
874 ('/abs/foo', 'abs/foo'),
875 ('../rel/foo', 'rel/foo'),
875 ('../rel/foo', 'rel/foo'),
876 ('file/../foo/foo', 'file/foo/foo'),
876 ('file/../foo/foo', 'file/foo/foo'),
877 ])
877 ])
878 def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
878 def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
879 repo = backend.create_repo()
879 repo = backend.create_repo()
880 commit_id = repo.get_commit().raw_id
880 commit_id = repo.get_commit().raw_id
881
881
882 response = self.app.post(
882 response = self.app.post(
883 route_path('repo_files_create_file',
883 route_path('repo_files_create_file',
884 repo_name=repo.repo_name,
884 repo_name=repo.repo_name,
885 commit_id=commit_id, f_path='/'),
885 commit_id=commit_id, f_path='/'),
886 params={
886 params={
887 'content': "foo",
887 'content': "foo",
888 'filename': filename,
888 'filename': filename,
889 'csrf_token': csrf_token,
889 'csrf_token': csrf_token,
890 },
890 },
891 status=302)
891 status=302)
892
892
893 expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
893 expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
894 assert_session_flash(response, expected_msg)
894 assert_session_flash(response, expected_msg)
895
895
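# Editor's sketch - not part of the original test suite. The parametrized
# cases above assert how the files view normalises user-submitted paths before
# committing; the helper below is a hypothetical stand-in for that behaviour
# (RhodeCode's real implementation lives in the repo_files view code).
def _example_clean_filename(filename):
    # drop empty, '.' and '..' segments, mirroring the expected values above
    parts = [p for p in filename.split('/') if p not in ('', '.', '..')]
    return '/'.join(parts)

def _example_clean_filename_matches_expectations():
    assert _example_clean_filename('/abs/foo') == 'abs/foo'
    assert _example_clean_filename('../rel/foo') == 'rel/foo'
    assert _example_clean_filename('file/../foo/foo') == 'file/foo/foo'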
896 @pytest.mark.parametrize("cnt, filename, content", [
896 @pytest.mark.parametrize("cnt, filename, content", [
897 (1, 'foo.txt', "Content"),
897 (1, 'foo.txt', "Content"),
898 (2, 'dir/foo.rst', "Content"),
898 (2, 'dir/foo.rst', "Content"),
899 (3, 'dir/foo-second.rst', "Content"),
899 (3, 'dir/foo-second.rst', "Content"),
900 (4, 'rel/dir/foo.bar', "Content"),
900 (4, 'rel/dir/foo.bar', "Content"),
901 ])
901 ])
902 def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
902 def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
903 repo = backend.create_repo()
903 repo = backend.create_repo()
904 commit_id = repo.get_commit().raw_id
904 commit_id = repo.get_commit().raw_id
905 response = self.app.post(
905 response = self.app.post(
906 route_path('repo_files_create_file',
906 route_path('repo_files_create_file',
907 repo_name=repo.repo_name,
907 repo_name=repo.repo_name,
908 commit_id=commit_id, f_path='/'),
908 commit_id=commit_id, f_path='/'),
909 params={
909 params={
910 'content': content,
910 'content': content,
911 'filename': filename,
911 'filename': filename,
912 'csrf_token': csrf_token,
912 'csrf_token': csrf_token,
913 },
913 },
914 status=302)
914 status=302)
915
915
916 expected_msg = 'Successfully committed new file `{}`'.format(filename)
916 expected_msg = 'Successfully committed new file `{}`'.format(filename)
917 assert_session_flash(response, expected_msg)
917 assert_session_flash(response, expected_msg)
918
918
919 def test_edit_file_view(self, backend):
919 def test_edit_file_view(self, backend):
920 response = self.app.get(
920 response = self.app.get(
921 route_path('repo_files_edit_file',
921 route_path('repo_files_edit_file',
922 repo_name=backend.repo_name,
922 repo_name=backend.repo_name,
923 commit_id=backend.default_head_id,
923 commit_id=backend.default_head_id,
924 f_path='vcs/nodes.py'),
924 f_path='vcs/nodes.py'),
925 status=200)
925 status=200)
926 response.mustcontain("Module holding everything related to vcs nodes.")
926 response.mustcontain("Module holding everything related to vcs nodes.")
927
927
928 def test_edit_file_view_not_on_branch(self, backend):
928 def test_edit_file_view_not_on_branch(self, backend):
929 repo = backend.create_repo()
929 repo = backend.create_repo()
930 backend.ensure_file(b"vcs/nodes.py")
930 backend.ensure_file(b"vcs/nodes.py")
931
931
932 response = self.app.get(
932 response = self.app.get(
933 route_path('repo_files_edit_file',
933 route_path('repo_files_edit_file',
934 repo_name=repo.repo_name,
934 repo_name=repo.repo_name,
935 commit_id='tip',
935 commit_id='tip',
936 f_path='vcs/nodes.py'),
936 f_path='vcs/nodes.py'),
937 status=302)
937 status=302)
938 assert_session_flash(
938 assert_session_flash(
939 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
939 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
940
940
941 def test_edit_file_view_commit_changes(self, backend, csrf_token):
941 def test_edit_file_view_commit_changes(self, backend, csrf_token):
942 repo = backend.create_repo()
942 repo = backend.create_repo()
943 backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'")
943 backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'")
944
944
945 response = self.app.post(
945 response = self.app.post(
946 route_path('repo_files_update_file',
946 route_path('repo_files_update_file',
947 repo_name=repo.repo_name,
947 repo_name=repo.repo_name,
948 commit_id=backend.default_head_id,
948 commit_id=backend.default_head_id,
949 f_path='vcs/nodes.py'),
949 f_path='vcs/nodes.py'),
950 params={
950 params={
951 'content': "print 'hello world'",
951 'content': "print 'hello world'",
952 'message': 'I committed',
952 'message': 'I committed',
953 'filename': "vcs/nodes.py",
953 'filename': "vcs/nodes.py",
954 'csrf_token': csrf_token,
954 'csrf_token': csrf_token,
955 },
955 },
956 status=302)
956 status=302)
957 assert_session_flash(
957 assert_session_flash(
958 response, 'Successfully committed changes to file `vcs/nodes.py`')
958 response, 'Successfully committed changes to file `vcs/nodes.py`')
959 tip = repo.get_commit(commit_idx=-1)
959 tip = repo.get_commit(commit_idx=-1)
960 assert tip.message == 'I committed'
960 assert tip.message == 'I committed'
961
961
962 def test_edit_file_view_commit_changes_default_message(self, backend,
962 def test_edit_file_view_commit_changes_default_message(self, backend,
963 csrf_token):
963 csrf_token):
964 repo = backend.create_repo()
964 repo = backend.create_repo()
965 backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'")
965 backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'")
966
966
967 commit_id = (
967 commit_id = (
968 backend.default_branch_name or
968 backend.default_branch_name or
969 backend.repo.scm_instance().commit_ids[-1])
969 backend.repo.scm_instance().commit_ids[-1])
970
970
971 response = self.app.post(
971 response = self.app.post(
972 route_path('repo_files_update_file',
972 route_path('repo_files_update_file',
973 repo_name=repo.repo_name,
973 repo_name=repo.repo_name,
974 commit_id=commit_id,
974 commit_id=commit_id,
975 f_path='vcs/nodes.py'),
975 f_path='vcs/nodes.py'),
976 params={
976 params={
977 'content': "print 'hello world'",
977 'content': "print 'hello world'",
978 'message': '',
978 'message': '',
979 'filename': "vcs/nodes.py",
979 'filename': "vcs/nodes.py",
980 'csrf_token': csrf_token,
980 'csrf_token': csrf_token,
981 },
981 },
982 status=302)
982 status=302)
983 assert_session_flash(
983 assert_session_flash(
984 response, 'Successfully committed changes to file `vcs/nodes.py`')
984 response, 'Successfully committed changes to file `vcs/nodes.py`')
985 tip = repo.get_commit(commit_idx=-1)
985 tip = repo.get_commit(commit_idx=-1)
986 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
986 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
987
987
988 def test_delete_file_view(self, backend):
988 def test_delete_file_view(self, backend):
989 self.app.get(
989 self.app.get(
990 route_path('repo_files_remove_file',
990 route_path('repo_files_remove_file',
991 repo_name=backend.repo_name,
991 repo_name=backend.repo_name,
992 commit_id=backend.default_head_id,
992 commit_id=backend.default_head_id,
993 f_path='vcs/nodes.py'),
993 f_path='vcs/nodes.py'),
994 status=200)
994 status=200)
995
995
996 def test_delete_file_view_not_on_branch(self, backend):
996 def test_delete_file_view_not_on_branch(self, backend):
997 repo = backend.create_repo()
997 repo = backend.create_repo()
998 backend.ensure_file(b'vcs/nodes.py')
998 backend.ensure_file(b'vcs/nodes.py')
999
999
1000 response = self.app.get(
1000 response = self.app.get(
1001 route_path('repo_files_remove_file',
1001 route_path('repo_files_remove_file',
1002 repo_name=repo.repo_name,
1002 repo_name=repo.repo_name,
1003 commit_id='tip',
1003 commit_id='tip',
1004 f_path='vcs/nodes.py'),
1004 f_path='vcs/nodes.py'),
1005 status=302)
1005 status=302)
1006 assert_session_flash(
1006 assert_session_flash(
1007 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
1007 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
1008
1008
1009 def test_delete_file_view_commit_changes(self, backend, csrf_token):
1009 def test_delete_file_view_commit_changes(self, backend, csrf_token):
1010 repo = backend.create_repo()
1010 repo = backend.create_repo()
1011 backend.ensure_file(b"vcs/nodes.py")
1011 backend.ensure_file(b"vcs/nodes.py")
1012
1012
1013 response = self.app.post(
1013 response = self.app.post(
1014 route_path('repo_files_delete_file',
1014 route_path('repo_files_delete_file',
1015 repo_name=repo.repo_name,
1015 repo_name=repo.repo_name,
1016 commit_id=backend.default_head_id,
1016 commit_id=backend.default_head_id,
1017 f_path='vcs/nodes.py'),
1017 f_path='vcs/nodes.py'),
1018 params={
1018 params={
1019 'message': 'i committed',
1019 'message': 'i committed',
1020 'csrf_token': csrf_token,
1020 'csrf_token': csrf_token,
1021 },
1021 },
1022 status=302)
1022 status=302)
1023 assert_session_flash(
1023 assert_session_flash(
1024 response, 'Successfully deleted file `vcs/nodes.py`')
1024 response, 'Successfully deleted file `vcs/nodes.py`')
1025
1025
1026
1026
1027 @pytest.mark.usefixtures("app")
1027 @pytest.mark.usefixtures("app")
1028 class TestFilesViewOtherCases(object):
1028 class TestFilesViewOtherCases(object):
1029
1029
1030 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
1030 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
1031 self, backend_stub, autologin_regular_user, user_regular,
1031 self, backend_stub, autologin_regular_user, user_regular,
1032 user_util):
1032 user_util):
1033
1033
1034 repo = backend_stub.create_repo()
1034 repo = backend_stub.create_repo()
1035 user_util.grant_user_permission_to_repo(
1035 user_util.grant_user_permission_to_repo(
1036 repo, user_regular, 'repository.write')
1036 repo, user_regular, 'repository.write')
1037 response = self.app.get(
1037 response = self.app.get(
1038 route_path('repo_files',
1038 route_path('repo_files',
1039 repo_name=repo.repo_name,
1039 repo_name=repo.repo_name,
1040 commit_id='tip', f_path='/'))
1040 commit_id='tip', f_path='/'))
1041
1041
1042 repo_file_add_url = route_path(
1042 repo_file_add_url = route_path(
1043 'repo_files_add_file',
1043 'repo_files_add_file',
1044 repo_name=repo.repo_name,
1044 repo_name=repo.repo_name,
1045 commit_id=0, f_path='')
1045 commit_id=0, f_path='')
1046 add_new = f'<a class="alert-link" href="{repo_file_add_url}">add a new file</a>'
1046 add_new = f'<a class="alert-link" href="{repo_file_add_url}">add a new file</a>'
1047
1047
1048 repo_file_upload_url = route_path(
1048 repo_file_upload_url = route_path(
1049 'repo_files_upload_file',
1049 'repo_files_upload_file',
1050 repo_name=repo.repo_name,
1050 repo_name=repo.repo_name,
1051 commit_id=0, f_path='')
1051 commit_id=0, f_path='')
1052 upload_new = f'<a class="alert-link" href="{repo_file_upload_url}">upload a new file</a>'
1052 upload_new = f'<a class="alert-link" href="{repo_file_upload_url}">upload a new file</a>'
1053
1053
1054 assert_session_flash(
1054 assert_session_flash(
1055 response,
1055 response,
1056 'There are no files yet. Click here to %s or %s.' % (add_new, upload_new)
1056 'There are no files yet. Click here to %s or %s.' % (add_new, upload_new)
1057 )
1057 )
1058
1058
1059 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1059 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1060 self, backend_stub, autologin_regular_user):
1060 self, backend_stub, autologin_regular_user):
1061 repo = backend_stub.create_repo()
1061 repo = backend_stub.create_repo()
1062 # init session for anon user
1062 # init session for anon user
1063 route_path('repo_summary', repo_name=repo.repo_name)
1063 route_path('repo_summary', repo_name=repo.repo_name)
1064
1064
1065 repo_file_add_url = route_path(
1065 repo_file_add_url = route_path(
1066 'repo_files_add_file',
1066 'repo_files_add_file',
1067 repo_name=repo.repo_name,
1067 repo_name=repo.repo_name,
1068 commit_id=0, f_path='')
1068 commit_id=0, f_path='')
1069
1069
1070 response = self.app.get(
1070 response = self.app.get(
1071 route_path('repo_files',
1071 route_path('repo_files',
1072 repo_name=repo.repo_name,
1072 repo_name=repo.repo_name,
1073 commit_id='tip', f_path='/'))
1073 commit_id='tip', f_path='/'))
1074
1074
1075 assert_session_flash(response, no_=repo_file_add_url)
1075 assert_session_flash(response, no_=repo_file_add_url)
1076
1076
1077 @pytest.mark.parametrize('file_node', [
1077 @pytest.mark.parametrize('file_node', [
1078 b'archive/file.zip',
1078 b'archive/file.zip',
1079 b'diff/my-file.txt',
1079 b'diff/my-file.txt',
1080 b'render.py',
1080 b'render.py',
1081 b'render',
1081 b'render',
1082 b'remove_file',
1082 b'remove_file',
1083 b'remove_file/to-delete.txt',
1083 b'remove_file/to-delete.txt',
1084 ])
1084 ])
1085 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1085 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1086 backend.create_repo()
1086 backend.create_repo()
1087 backend.ensure_file(file_node)
1087 backend.ensure_file(file_node)
1088
1088
1089 self.app.get(
1089 self.app.get(
1090 route_path('repo_files',
1090 route_path('repo_files',
1091 repo_name=backend.repo_name,
1091 repo_name=backend.repo_name,
1092 commit_id='tip', f_path=safe_str(file_node)),
1092 commit_id='tip', f_path=safe_str(file_node)),
1093 status=200)
1093 status=200)
1094
1094
1095
1095
1096 class TestAdjustFilePathForSvn(object):
1096 class TestAdjustFilePathForSvn(object):
1097 """
1097 """
1098 SVN specific adjustments of node history in RepoFilesView.
1098 SVN specific adjustments of node history in RepoFilesView.
1099 """
1099 """
1100
1100
1101 def test_returns_path_relative_to_matched_reference(self):
1101 def test_returns_path_relative_to_matched_reference(self):
1102 repo = self._repo(branches=['trunk'])
1102 repo = self._repo(branches=['trunk'])
1103 self.assert_file_adjustment('trunk/file', 'file', repo)
1103 self.assert_file_adjustment('trunk/file', 'file', repo)
1104
1104
1105 def test_does_not_modify_file_if_no_reference_matches(self):
1105 def test_does_not_modify_file_if_no_reference_matches(self):
1106 repo = self._repo(branches=['trunk'])
1106 repo = self._repo(branches=['trunk'])
1107 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1107 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1108
1108
1109 def test_does_not_adjust_partial_directory_names(self):
1109 def test_does_not_adjust_partial_directory_names(self):
1110 repo = self._repo(branches=['trun'])
1110 repo = self._repo(branches=['trun'])
1111 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1111 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1112
1112
1113 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1113 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1114 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1114 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1115 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1115 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1116
1116
1117 def assert_file_adjustment(self, f_path, expected, repo):
1117 def assert_file_adjustment(self, f_path, expected, repo):
1118 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1118 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1119 assert result == expected
1119 assert result == expected
1120
1120
1121 def _repo(self, branches=None):
1121 def _repo(self, branches=None):
1122 repo = mock.Mock()
1122 repo = mock.Mock()
1123 repo.branches = OrderedDict((name, '0') for name in branches or [])
1123 repo.branches = OrderedDict((name, '0') for name in branches or [])
1124 repo.tags = {}
1124 repo.tags = {}
1125 return repo
1125 return repo
@@ -1,197 +1,197 b''
1
1
2
2
3 # Copyright (C) 2012-2023 RhodeCode GmbH
3 # Copyright (C) 2012-2023 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import re
20 import re
21
21
22 import pygments.filter
22 import pygments.filter
23 import pygments.filters
23 import pygments.filters
24 from pygments.token import Comment
24 from pygments.token import Comment
25
25
26 HL_BEG_MARKER = '__RCSearchHLMarkBEG__'
26 HL_BEG_MARKER = '__RCSearchHLMarkBEG__'
27 HL_END_MARKER = '__RCSearchHLMarkEND__'
27 HL_END_MARKER = '__RCSearchHLMarkEND__'
28 HL_MARKER_RE = '{}(.*?){}'.format(HL_BEG_MARKER, HL_END_MARKER)
28 HL_MARKER_RE = '{}(.*?){}'.format(HL_BEG_MARKER, HL_END_MARKER)
29
29
30
30
31 class ElasticSearchHLFilter(pygments.filters.Filter):
31 class ElasticSearchHLFilter(pygments.filters.Filter):
32 _names = [HL_BEG_MARKER, HL_END_MARKER]
32 _names = [HL_BEG_MARKER, HL_END_MARKER]
33
33
34 def __init__(self, **options):
34 def __init__(self, **options):
35 pygments.filters.Filter.__init__(self, **options)
35 pygments.filters.Filter.__init__(self, **options)
36
36
37 def filter(self, lexer, stream):
37 def filter(self, lexer, stream):
38 def tokenize(_value):
38 def tokenize(_value):
39 for token in re.split('({}|{})'.format(
39 for token in re.split('({}|{})'.format(
40 self._names[0], self._names[1]), _value):
40 self._names[0], self._names[1]), _value):
41 if token:
41 if token:
42 yield token
42 yield token
43
43
44 hl = False
44 hl = False
45 for ttype, value in stream:
45 for ttype, value in stream:
46
46
47 if self._names[0] in value or self._names[1] in value:
47 if self._names[0] in value or self._names[1] in value:
48 for item in tokenize(value):
48 for item in tokenize(value):
49 if item == self._names[0]:
49 if item == self._names[0]:
50 # skip marker, but start HL
50 # skip marker, but start HL
51 hl = True
51 hl = True
52 continue
52 continue
53 elif item == self._names[1]:
53 elif item == self._names[1]:
54 hl = False
54 hl = False
55 continue
55 continue
56
56
57 if hl:
57 if hl:
58 yield Comment.ElasticMatch, item
58 yield Comment.ElasticMatch, item
59 else:
59 else:
60 yield ttype, item
60 yield ttype, item
61 else:
61 else:
62 if hl:
62 if hl:
63 yield Comment.ElasticMatch, value
63 yield Comment.ElasticMatch, value
64 else:
64 else:
65 yield ttype, value
65 yield ttype, value
66
66
67
67
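# Editor's sketch - not part of the original module. A minimal illustration of
# how ElasticSearchHLFilter is meant to be wired into a Pygments lexer: the
# search backend is assumed to have wrapped every hit in the HL_BEG_MARKER /
# HL_END_MARKER pair, and the filter re-emits those fragments as
# Comment.ElasticMatch tokens so a formatter can style them differently.
def _example_collect_highlighted_fragments(marked_source, lexer_name='python'):
    from pygments.lexers import get_lexer_by_name  # local import, example only

    lexer = get_lexer_by_name(lexer_name)
    lexer.add_filter(ElasticSearchHLFilter())
    return [value for ttype, value in lexer.get_tokens(marked_source)
            if ttype is Comment.ElasticMatch]

# e.g. _example_collect_highlighted_fragments(
#     'x = %sneedle%s\n' % (HL_BEG_MARKER, HL_END_MARKER)) == ['needle']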
68 def extract_phrases(text_query):
68 def extract_phrases(text_query):
69 """
69 """
70 Extracts phrases from a search term string, making sure phrases
70 Extracts phrases from a search term string, making sure phrases
71 contained in double quotes are kept together, and discarding empty
71 contained in double quotes are kept together, and discarding empty
72 or whitespace-only values, e.g.
72 or whitespace-only values, e.g.
73
73
74 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
74 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
75
75
76 """
76 """
77
77
78 in_phrase = False
78 in_phrase = False
79 buf = ''
79 buf = ''
80 phrases = []
80 phrases = []
81 for char in text_query:
81 for char in text_query:
82 if in_phrase:
82 if in_phrase:
83 if char == '"': # end phrase
83 if char == '"': # end phrase
84 phrases.append(buf)
84 phrases.append(buf)
85 buf = ''
85 buf = ''
86 in_phrase = False
86 in_phrase = False
87 continue
87 continue
88 else:
88 else:
89 buf += char
89 buf += char
90 continue
90 continue
91 else:
91 else:
92 if char == '"': # start phrase
92 if char == '"': # start phrase
93 in_phrase = True
93 in_phrase = True
94 phrases.append(buf)
94 phrases.append(buf)
95 buf = ''
95 buf = ''
96 continue
96 continue
97 elif char == ' ':
97 elif char == ' ':
98 phrases.append(buf)
98 phrases.append(buf)
99 buf = ''
99 buf = ''
100 continue
100 continue
101 else:
101 else:
102 buf += char
102 buf += char
103
103
104 phrases.append(buf)
104 phrases.append(buf)
105 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
105 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
106 return phrases
106 return phrases
107
107
108
108
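# Editor's sketch - not part of the original module. Two extra cases that
# follow from the implementation above: runs of whitespace never produce empty
# phrases, and an unterminated quote keeps the remaining text as one phrase.
def _example_extract_phrases_edge_cases():
    assert extract_phrases('some   text') == ['some', 'text']
    assert extract_phrases('start "no closing quote') == ['start', 'no closing quote']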
109 def get_matching_phrase_offsets(text, phrases):
109 def get_matching_phrase_offsets(text, phrases):
110 """
110 """
111 Returns a list of (start, end) string offsets in `text` at which the given `phrases` match
111 Returns a list of (start, end) string offsets in `text` at which the given `phrases` match
112
112
113 >>> get_matching_phrase_offsets('some text here', ['some', 'here'])
113 >>> get_matching_phrase_offsets('some text here', ['some', 'here'])
114 [(0, 4), (10, 14)]
114 [(0, 4), (10, 14)]
115
115
116 """
116 """
117 phrases = phrases or []
117 phrases = phrases or []
118 offsets = []
118 offsets = []
119
119
120 for phrase in phrases:
120 for phrase in phrases:
121 for match in re.finditer(phrase, text):
121 for match in re.finditer(phrase, text):
122 offsets.append((match.start(), match.end()))
122 offsets.append((match.start(), match.end()))
123
123
124 return offsets
124 return offsets
125
125
126
126
127 def get_matching_markers_offsets(text, markers=None):
127 def get_matching_markers_offsets(text, markers=None):
128 """
128 r"""
129 Returns a list of string offsets in `text` that are between matching markers
129 Returns a list of string offsets in `text` that are between matching markers
130
130
131 >>> get_matching_markers_offsets('$1some$2 text $1here$2 marked', ['\$1(.*?)\$2'])
131 >>> get_matching_markers_offsets('$1some$2 text $1here$2 marked', ['\$1(.*?)\$2'])
132 [(0, 8), (14, 22)]
132 [(0, 8), (14, 22)]
133
133
134 """
134 """
135 markers = markers or [HL_MARKER_RE]
135 markers = markers or [HL_MARKER_RE]
136 offsets = []
136 offsets = []
137
137
138 if markers:
138 if markers:
139 for mark in markers:
139 for mark in markers:
140 for match in re.finditer(mark, text):
140 for match in re.finditer(mark, text):
141 offsets.append((match.start(), match.end()))
141 offsets.append((match.start(), match.end()))
142
142
143 return offsets
143 return offsets
144
144
145
145
146 def normalize_text_for_matching(x):
146 def normalize_text_for_matching(x):
147 """
147 """
148 Replaces all non-alphanumeric characters with spaces and lowercases the string,
148 Replaces all non-alphanumeric characters with spaces and lowercases the string,
149 useful for comparing two text strings without punctuation
149 useful for comparing two text strings without punctuation
150 """
150 """
151 return re.sub(r'\W', ' ', x.lower())
151 return re.sub(r'\W', ' ', x.lower())
152
152
153
153
154 def get_matching_line_offsets(lines, terms=None, markers=None):
154 def get_matching_line_offsets(lines, terms=None, markers=None):
155 """ Return a set of `lines` indices (starting from 1) matching a
155 """ Return a set of `lines` indices (starting from 1) matching a
156 text search query, along with `context` lines above/below matching lines
156 text search query, along with `context` lines above/below matching lines
157
157
158 :param lines: a text string; it is split into lines and searched
158 :param lines: a text string; it is split into lines and searched
159 :param terms: search term string to match in lines eg. 'some text'
159 :param terms: search term string to match in lines eg. 'some text'
160 :param markers: instead of terms, use highlight marker regexes that
160 :param markers: instead of terms, use highlight marker regexes that
161 mark the beginning and end of a matched item, e.g. ['START(.*?)END']
161 mark the beginning and end of a matched item, e.g. ['START(.*?)END']
162
162
163 eg.
163 eg.
164
164
165 text = '''
165 text = '''
166 words words words
166 words words words
167 words words words
167 words words words
168 some text some
168 some text some
169 words words words
169 words words words
170 words words words
170 words words words
171 text here what
171 text here what
172 '''
172 '''
173 get_matching_line_offsets(text, 'text')
173 get_matching_line_offsets(text, 'text')
174 6, {3: [(5, 9)], 6: [(0, 4)]}
174 6, {3: [(5, 9)], 6: [(0, 4)]}
175
175
176 """
176 """
177 matching_lines = {}
177 matching_lines = {}
178 line_index = 0
178 line_index = 0
179
179
180 if terms:
180 if terms:
181 phrases = [normalize_text_for_matching(phrase)
181 phrases = [normalize_text_for_matching(phrase)
182 for phrase in extract_phrases(terms)]
182 for phrase in extract_phrases(terms)]
183
183
184 for line_index, line in enumerate(lines.splitlines(), start=1):
184 for line_index, line in enumerate(lines.splitlines(), start=1):
185 normalized_line = normalize_text_for_matching(line)
185 normalized_line = normalize_text_for_matching(line)
186 match_offsets = get_matching_phrase_offsets(normalized_line, phrases)
186 match_offsets = get_matching_phrase_offsets(normalized_line, phrases)
187 if match_offsets:
187 if match_offsets:
188 matching_lines[line_index] = match_offsets
188 matching_lines[line_index] = match_offsets
189
189
190 else:
190 else:
191 markers = markers or [HL_MARKER_RE]
191 markers = markers or [HL_MARKER_RE]
192 for line_index, line in enumerate(lines.splitlines(), start=1):
192 for line_index, line in enumerate(lines.splitlines(), start=1):
193 match_offsets = get_matching_markers_offsets(line, markers=markers)
193 match_offsets = get_matching_markers_offsets(line, markers=markers)
194 if match_offsets:
194 if match_offsets:
195 matching_lines[line_index] = match_offsets
195 matching_lines[line_index] = match_offsets
196
196
197 return line_index, matching_lines
197 return line_index, matching_lines
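# Editor's sketch - not part of the original module. End-to-end use of the
# helpers above for a plain-text (non-marker) search: the query is split into
# phrases and every line is scanned for normalised matches.
def _example_search_in_text(query, text):
    total_lines, matches = get_matching_line_offsets(text, terms=query)
    # `matches` maps 1-based line numbers to (start, end) offsets, e.g.
    # _example_search_in_text('some "text here"', 'no hit\nsome text here\n')
    # returns (2, {2: [(0, 4), (5, 14)]})
    return total_lines, matches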
@@ -1,1730 +1,1735 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import collections
20 import collections
21 import datetime
21 import datetime
22 import os
22 import os
23 import re
23 import re
24 import pprint
24 import pprint
25 import shutil
25 import shutil
26 import socket
26 import socket
27 import subprocess
27 import subprocess
28 import time
28 import time
29 import uuid
29 import uuid
30 import dateutil.tz
30 import dateutil.tz
31 import logging
31 import logging
32 import functools
32 import functools
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 import rhodecode.lib
42 import rhodecode.lib
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 from rhodecode.model.meta import Session
48 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.str_utils import safe_bytes
59 from rhodecode.lib.str_utils import safe_bytes
60 from rhodecode.lib.hash_utils import sha1_safe
60 from rhodecode.lib.hash_utils import sha1_safe
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
72
72
73
73
74 def cmp(a, b):
74 def cmp(a, b):
75 # backport cmp from python2 so we can still use it in the custom code in this module
75 # backport cmp from python2 so we can still use it in the custom code in this module
76 return (a > b) - (a < b)
76 return (a > b) - (a < b)
77
77
78
78
79 @pytest.fixture(scope='session', autouse=True)
79 @pytest.fixture(scope='session', autouse=True)
80 def activate_example_rcextensions(request):
80 def activate_example_rcextensions(request):
81 """
81 """
82 Patch in an example rcextensions module which verifies passed in kwargs.
82 Patch in an example rcextensions module which verifies passed in kwargs.
83 """
83 """
84 from rhodecode.config import rcextensions
84 from rhodecode.config import rcextensions
85
85
86 old_extensions = rhodecode.EXTENSIONS
86 old_extensions = rhodecode.EXTENSIONS
87 rhodecode.EXTENSIONS = rcextensions
87 rhodecode.EXTENSIONS = rcextensions
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89
89
90 @request.addfinalizer
90 @request.addfinalizer
91 def cleanup():
91 def cleanup():
92 rhodecode.EXTENSIONS = old_extensions
92 rhodecode.EXTENSIONS = old_extensions
93
93
94
94
95 @pytest.fixture()
95 @pytest.fixture()
96 def capture_rcextensions():
96 def capture_rcextensions():
97 """
97 """
98 Returns the recorded calls to entry points in rcextensions.
98 Returns the recorded calls to entry points in rcextensions.
99 """
99 """
100 calls = rhodecode.EXTENSIONS.calls
100 calls = rhodecode.EXTENSIONS.calls
101 calls.clear()
101 calls.clear()
102 # Note: At this moment, it is still the empty dict, but that will
102 # Note: At this moment, it is still the empty dict, but that will
103 # be filled during the test run and since it is a reference this
103 # be filled during the test run and since it is a reference this
104 # is enough to make it work.
104 # is enough to make it work.
105 return calls
105 return calls
106
106
107
107
108 @pytest.fixture(scope='session')
108 @pytest.fixture(scope='session')
109 def http_environ_session():
109 def http_environ_session():
110 """
110 """
111 Allows using "http_environ" in session scope.
111 Allows using "http_environ" in session scope.
112 """
112 """
113 return plain_http_environ()
113 return plain_http_environ()
114
114
115
115
116 def plain_http_host_stub():
116 def plain_http_host_stub():
117 """
117 """
118 Value of HTTP_HOST in the test run.
118 Value of HTTP_HOST in the test run.
119 """
119 """
120 return 'example.com:80'
120 return 'example.com:80'
121
121
122
122
123 @pytest.fixture()
123 @pytest.fixture()
124 def http_host_stub():
124 def http_host_stub():
125 """
125 """
126 Value of HTTP_HOST in the test run.
126 Value of HTTP_HOST in the test run.
127 """
127 """
128 return plain_http_host_stub()
128 return plain_http_host_stub()
129
129
130
130
131 def plain_http_host_only_stub():
131 def plain_http_host_only_stub():
132 """
132 """
133 Value of HTTP_HOST in the test run, without the port.
133 Value of HTTP_HOST in the test run, without the port.
134 """
134 """
135 return plain_http_host_stub().split(':')[0]
135 return plain_http_host_stub().split(':')[0]
136
136
137
137
138 @pytest.fixture()
138 @pytest.fixture()
139 def http_host_only_stub():
139 def http_host_only_stub():
140 """
140 """
141 Value of HTTP_HOST in the test run, without the port.
141 Value of HTTP_HOST in the test run, without the port.
142 """
142 """
143 return plain_http_host_only_stub()
143 return plain_http_host_only_stub()
144
144
145
145
146 def plain_http_environ():
146 def plain_http_environ():
147 """
147 """
148 HTTP extra environ keys.
148 HTTP extra environ keys.
149
149
150 User by the test application and as well for setting up the pylons
150 User by the test application and as well for setting up the pylons
151 environment. In the case of the fixture "app" it should be possible
151 environment. In the case of the fixture "app" it should be possible
152 to override this for a specific test case.
152 to override this for a specific test case.
153 """
153 """
154 return {
154 return {
155 'SERVER_NAME': plain_http_host_only_stub(),
155 'SERVER_NAME': plain_http_host_only_stub(),
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 'HTTP_HOST': plain_http_host_stub(),
157 'HTTP_HOST': plain_http_host_stub(),
158 'HTTP_USER_AGENT': 'rc-test-agent',
158 'HTTP_USER_AGENT': 'rc-test-agent',
159 'REQUEST_METHOD': 'GET'
159 'REQUEST_METHOD': 'GET'
160 }
160 }
161
161
162
162
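# Editor's sketch - not part of the original conftest. Because the test `app`
# is built with `http_environ` as its extra WSGI environ, a variant of this
# dict can be used to simulate requests arriving for a different host; the
# host name and port below are purely illustrative.
@pytest.fixture()
def http_environ_other_host():
    environ = plain_http_environ()
    environ['HTTP_HOST'] = 'other.example.com:8080'
    environ['SERVER_NAME'] = 'other.example.com'
    environ['SERVER_PORT'] = '8080'
    return environ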
163 @pytest.fixture()
163 @pytest.fixture()
164 def http_environ():
164 def http_environ():
165 """
165 """
166 HTTP extra environ keys.
166 HTTP extra environ keys.
167
167
168 Used by the test application as well as for setting up the pylons
168 Used by the test application as well as for setting up the pylons
169 environment. In the case of the fixture "app" it should be possible
169 environment. In the case of the fixture "app" it should be possible
170 to override this for a specific test case.
170 to override this for a specific test case.
171 """
171 """
172 return plain_http_environ()
172 return plain_http_environ()
173
173
174
174
175 @pytest.fixture(scope='session')
175 @pytest.fixture(scope='session')
176 def baseapp(ini_config, vcsserver, http_environ_session):
176 def baseapp(ini_config, vcsserver, http_environ_session):
177 from rhodecode.lib.pyramid_utils import get_app_config
177 from rhodecode.lib.pyramid_utils import get_app_config
178 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
179
179
180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
181 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
182
182
183 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185
185
186 return app
186 return app
187
187
188
188
189 @pytest.fixture(scope='function')
189 @pytest.fixture(scope='function')
190 def app(request, config_stub, baseapp, http_environ):
190 def app(request, config_stub, baseapp, http_environ):
191 app = CustomTestApp(
191 app = CustomTestApp(
192 baseapp,
192 baseapp,
193 extra_environ=http_environ)
193 extra_environ=http_environ)
194 if request.cls:
194 if request.cls:
195 request.cls.app = app
195 request.cls.app = app
196 return app
196 return app
197
197
198
198
199 @pytest.fixture(scope='session')
199 @pytest.fixture(scope='session')
200 def app_settings(baseapp, ini_config):
200 def app_settings(baseapp, ini_config):
201 """
201 """
202 Settings dictionary used to create the app.
202 Settings dictionary used to create the app.
203
203
204 Parses the ini file and passes the result through the sanitize and apply
204 Parses the ini file and passes the result through the sanitize and apply
205 defaults mechanism in `rhodecode.config.middleware`.
205 defaults mechanism in `rhodecode.config.middleware`.
206 """
206 """
207 return baseapp.config.get_settings()
207 return baseapp.config.get_settings()
208
208
209
209
210 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
211 def db_connection(ini_settings):
211 def db_connection(ini_settings):
212 # Initialize the database connection.
212 # Initialize the database connection.
213 config_utils.initialize_database(ini_settings)
213 config_utils.initialize_database(ini_settings)
214
214
215
215
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217
217
218
218
219 def _autologin_user(app, *args):
219 def _autologin_user(app, *args):
220 session = login_user_session(app, *args)
220 session = login_user_session(app, *args)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 return LoginData(csrf_token, session['rhodecode_user'])
222 return LoginData(csrf_token, session['rhodecode_user'])
223
223
224
224
225 @pytest.fixture()
225 @pytest.fixture()
226 def autologin_user(app):
226 def autologin_user(app):
227 """
227 """
228 Utility fixture which makes sure that the admin user is logged in
228 Utility fixture which makes sure that the admin user is logged in
229 """
229 """
230 return _autologin_user(app)
230 return _autologin_user(app)
231
231
232
232
233 @pytest.fixture()
233 @pytest.fixture()
234 def autologin_regular_user(app):
234 def autologin_regular_user(app):
235 """
235 """
236 Utility fixture which makes sure that the regular user is logged in
236 Utility fixture which makes sure that the regular user is logged in
237 """
237 """
238 return _autologin_user(
238 return _autologin_user(
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240
240
241
241
242 @pytest.fixture(scope='function')
242 @pytest.fixture(scope='function')
243 def csrf_token(request, autologin_user):
243 def csrf_token(request, autologin_user):
244 return autologin_user.csrf_token
244 return autologin_user.csrf_token
245
245
246
246
247 @pytest.fixture(scope='function')
247 @pytest.fixture(scope='function')
248 def xhr_header(request):
248 def xhr_header(request):
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250
250
251
251
252 @pytest.fixture()
252 @pytest.fixture()
253 def real_crypto_backend(monkeypatch):
253 def real_crypto_backend(monkeypatch):
254 """
254 """
255 Switch the production crypto backend on for this test.
255 Switch the production crypto backend on for this test.
256
256
257 During the test run the crypto backend is replaced with a faster
257 During the test run the crypto backend is replaced with a faster
258 implementation based on the MD5 algorithm.
258 implementation based on the MD5 algorithm.
259 """
259 """
260 monkeypatch.setattr(rhodecode, 'is_test', False)
260 monkeypatch.setattr(rhodecode, 'is_test', False)
261
261
262
262
263 @pytest.fixture(scope='class')
263 @pytest.fixture(scope='class')
264 def index_location(request, baseapp):
264 def index_location(request, baseapp):
265 index_location = baseapp.config.get_settings()['search.location']
265 index_location = baseapp.config.get_settings()['search.location']
266 if request.cls:
266 if request.cls:
267 request.cls.index_location = index_location
267 request.cls.index_location = index_location
268 return index_location
268 return index_location
269
269
270
270
271 @pytest.fixture(scope='session', autouse=True)
271 @pytest.fixture(scope='session', autouse=True)
272 def tests_tmp_path(request):
272 def tests_tmp_path(request):
273 """
273 """
274 Create temporary directory to be used during the test session.
274 Create temporary directory to be used during the test session.
275 """
275 """
276 if not os.path.exists(TESTS_TMP_PATH):
276 if not os.path.exists(TESTS_TMP_PATH):
277 os.makedirs(TESTS_TMP_PATH)
277 os.makedirs(TESTS_TMP_PATH)
278
278
279 if not request.config.getoption('--keep-tmp-path'):
279 if not request.config.getoption('--keep-tmp-path'):
280 @request.addfinalizer
280 @request.addfinalizer
281 def remove_tmp_path():
281 def remove_tmp_path():
282 shutil.rmtree(TESTS_TMP_PATH)
282 shutil.rmtree(TESTS_TMP_PATH)
283
283
284 return TESTS_TMP_PATH
284 return TESTS_TMP_PATH
285
285
286
286
287 @pytest.fixture()
287 @pytest.fixture()
288 def test_repo_group(request):
288 def test_repo_group(request):
289 """
289 """
290 Create a temporary repository group, and destroy it after
290 Create a temporary repository group, and destroy it after
291 usage automatically
291 usage automatically
292 """
292 """
293 fixture = Fixture()
293 fixture = Fixture()
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 repo_group = fixture.create_repo_group(repogroupid)
295 repo_group = fixture.create_repo_group(repogroupid)
296
296
297 def _cleanup():
297 def _cleanup():
298 fixture.destroy_repo_group(repogroupid)
298 fixture.destroy_repo_group(repogroupid)
299
299
300 request.addfinalizer(_cleanup)
300 request.addfinalizer(_cleanup)
301 return repo_group
301 return repo_group
302
302
303
303
304 @pytest.fixture()
304 @pytest.fixture()
305 def test_user_group(request):
305 def test_user_group(request):
306 """
306 """
307 Create a temporary user group, and destroy it after
307 Create a temporary user group, and destroy it after
308 usage automatically
308 usage automatically
309 """
309 """
310 fixture = Fixture()
310 fixture = Fixture()
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 user_group = fixture.create_user_group(usergroupid)
312 user_group = fixture.create_user_group(usergroupid)
313
313
314 def _cleanup():
314 def _cleanup():
315 fixture.destroy_user_group(user_group)
315 fixture.destroy_user_group(user_group)
316
316
317 request.addfinalizer(_cleanup)
317 request.addfinalizer(_cleanup)
318 return user_group
318 return user_group
319
319
320
320
321 @pytest.fixture(scope='session')
321 @pytest.fixture(scope='session')
322 def test_repo(request):
322 def test_repo(request):
323 container = TestRepoContainer()
323 container = TestRepoContainer()
324 request.addfinalizer(container._cleanup)
324 request.addfinalizer(container._cleanup)
325 return container
325 return container
326
326
327
327
328 class TestRepoContainer(object):
328 class TestRepoContainer(object):
329 """
329 """
330 Container for test repositories which are used read only.
330 Container for test repositories which are used read only.
331
331
332 Repositories will be created on demand and re-used during the lifetime
332 Repositories will be created on demand and re-used during the lifetime
333 of this object.
333 of this object.
334
334
335 Usage to get the svn test repository "minimal"::
335 Usage to get the svn test repository "minimal"::
336
336
337 test_repo = TestRepoContainer()
337 test_repo = TestRepoContainer()
338 repo = test_repo('minimal', 'svn')
338 repo = test_repo('minimal', 'svn')
339
339
340 """
340 """
341
341
342 dump_extractors = {
342 dump_extractors = {
343 'git': utils.extract_git_repo_from_dump,
343 'git': utils.extract_git_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
346 }
346 }
347
347
348 def __init__(self):
348 def __init__(self):
349 self._cleanup_repos = []
349 self._cleanup_repos = []
350 self._fixture = Fixture()
350 self._fixture = Fixture()
351 self._repos = {}
351 self._repos = {}
352
352
353 def __call__(self, dump_name, backend_alias, config=None):
353 def __call__(self, dump_name, backend_alias, config=None):
354 key = (dump_name, backend_alias)
354 key = (dump_name, backend_alias)
355 if key not in self._repos:
355 if key not in self._repos:
356 repo = self._create_repo(dump_name, backend_alias, config)
356 repo = self._create_repo(dump_name, backend_alias, config)
357 self._repos[key] = repo.repo_id
357 self._repos[key] = repo.repo_id
358 return Repository.get(self._repos[key])
358 return Repository.get(self._repos[key])
359
359
360 def _create_repo(self, dump_name, backend_alias, config):
360 def _create_repo(self, dump_name, backend_alias, config):
361 repo_name = '%s-%s' % (backend_alias, dump_name)
361 repo_name = '%s-%s' % (backend_alias, dump_name)
362 backend = get_backend(backend_alias)
362 backend = get_backend(backend_alias)
363 dump_extractor = self.dump_extractors[backend_alias]
363 dump_extractor = self.dump_extractors[backend_alias]
364 repo_path = dump_extractor(dump_name, repo_name)
364 repo_path = dump_extractor(dump_name, repo_name)
365
365
366 vcs_repo = backend(repo_path, config=config)
366 vcs_repo = backend(repo_path, config=config)
367 repo2db_mapper({repo_name: vcs_repo})
367 repo2db_mapper({repo_name: vcs_repo})
368
368
369 repo = RepoModel().get_by_repo_name(repo_name)
369 repo = RepoModel().get_by_repo_name(repo_name)
370 self._cleanup_repos.append(repo_name)
370 self._cleanup_repos.append(repo_name)
371 return repo
371 return repo
372
372
373 def _cleanup(self):
373 def _cleanup(self):
374 for repo_name in reversed(self._cleanup_repos):
374 for repo_name in reversed(self._cleanup_repos):
375 self._fixture.destroy_repo(repo_name)
375 self._fixture.destroy_repo(repo_name)
376
376
377
377
378 def backend_base(request, backend_alias, baseapp, test_repo):
378 def backend_base(request, backend_alias, baseapp, test_repo):
379 if backend_alias not in request.config.getoption('--backends'):
379 if backend_alias not in request.config.getoption('--backends'):
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381
381
382 utils.check_xfail_backends(request.node, backend_alias)
382 utils.check_xfail_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
384
384
385 repo_name = 'vcs_test_%s' % (backend_alias, )
385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 backend = Backend(
386 backend = Backend(
387 alias=backend_alias,
387 alias=backend_alias,
388 repo_name=repo_name,
388 repo_name=repo_name,
389 test_name=request.node.name,
389 test_name=request.node.name,
390 test_repo_container=test_repo)
390 test_repo_container=test_repo)
391 request.addfinalizer(backend.cleanup)
391 request.addfinalizer(backend.cleanup)
392 return backend
392 return backend
393
393
394
394
395 @pytest.fixture()
395 @pytest.fixture()
396 def backend(request, backend_alias, baseapp, test_repo):
396 def backend(request, backend_alias, baseapp, test_repo):
397 """
397 """
398 Parametrized fixture which represents a single backend implementation.
398 Parametrized fixture which represents a single backend implementation.
399
399
400 It respects the option `--backends` to focus the test run on specific
400 It respects the option `--backends` to focus the test run on specific
401 backend implementations.
401 backend implementations.
402
402
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 for specific backends. This is intended as a utility for incremental
404 for specific backends. This is intended as a utility for incremental
405 development of a new backend implementation.
405 development of a new backend implementation.
406 """
406 """
407 return backend_base(request, backend_alias, baseapp, test_repo)
407 return backend_base(request, backend_alias, baseapp, test_repo)
408
408
409
409
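# Editor's sketch - not part of the original conftest. A typical consumer of
# the parametrized `backend` fixture, together with the xfail marker that the
# fixture honours; the test body is illustrative only.
@pytest.mark.xfail_backends("svn", reason="example: svn flavour not exercised")
def test_example_backend_consumer(backend):
    repo = backend.create_repo()
    assert repo.repo_id is not None
    assert backend.alias in ('git', 'hg', 'svn')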
410 @pytest.fixture()
410 @pytest.fixture()
411 def backend_git(request, baseapp, test_repo):
411 def backend_git(request, baseapp, test_repo):
412 return backend_base(request, 'git', baseapp, test_repo)
412 return backend_base(request, 'git', baseapp, test_repo)
413
413
414
414
415 @pytest.fixture()
415 @pytest.fixture()
416 def backend_hg(request, baseapp, test_repo):
416 def backend_hg(request, baseapp, test_repo):
417 return backend_base(request, 'hg', baseapp, test_repo)
417 return backend_base(request, 'hg', baseapp, test_repo)
418
418
419
419
420 @pytest.fixture()
420 @pytest.fixture()
421 def backend_svn(request, baseapp, test_repo):
421 def backend_svn(request, baseapp, test_repo):
422 return backend_base(request, 'svn', baseapp, test_repo)
422 return backend_base(request, 'svn', baseapp, test_repo)
423
423
424
424
425 @pytest.fixture()
425 @pytest.fixture()
426 def backend_random(backend_git):
426 def backend_random(backend_git):
427 """
427 """
428 Use this to express that your tests need "a backend".
428 Use this to express that your tests need "a backend".
429
429
430 A few of our tests need a backend, so that we can run the code. This
430 A few of our tests need a backend, so that we can run the code. This
431 fixture is intended to be used for such cases. It will pick one of the
431 fixture is intended to be used for such cases. It will pick one of the
432 backends and run the tests.
432 backends and run the tests.
433
433
434 The fixture `backend` would run the test multiple times for each
434 The fixture `backend` would run the test multiple times for each
435 available backend which is a pure waste of time if the test is
435 available backend which is a pure waste of time if the test is
436 independent of the backend type.
436 independent of the backend type.
437 """
437 """
438 # TODO: johbo: Change this to pick a random backend
438 # TODO: johbo: Change this to pick a random backend
439 return backend_git
439 return backend_git
440
440
441
441
442 @pytest.fixture()
442 @pytest.fixture()
443 def backend_stub(backend_git):
443 def backend_stub(backend_git):
444 """
444 """
445 Use this to express that your tests need a backend stub
445 Use this to express that your tests need a backend stub
446
446
447 TODO: mikhail: Implement real stub logic instead of returning
447 TODO: mikhail: Implement real stub logic instead of returning
448 a git backend
448 a git backend
449 """
449 """
450 return backend_git
450 return backend_git
451
451
452
452
453 @pytest.fixture()
453 @pytest.fixture()
454 def repo_stub(backend_stub):
454 def repo_stub(backend_stub):
455 """
455 """
456 Use this to express that your tests need a repository stub
456 Use this to express that your tests need a repository stub
457 """
457 """
458 return backend_stub.create_repo()
458 return backend_stub.create_repo()
459
459
460
460
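When a test is independent of the backend type, the stub fixtures above avoid the per-backend parametrization entirely. A hedged sketch, assuming any repository object will do:

def test_repo_stub_has_a_name(repo_stub):
    # repo_stub is a single throw-away repository created via the git backend
    assert repo_stub.repo_name
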
461 class Backend(object):
461 class Backend(object):
462 """
462 """
463 Represents the test configuration for one supported backend
463 Represents the test configuration for one supported backend
464
464
465 Provides easy access to different test repositories based on
465 Provides easy access to different test repositories based on
466 `__getitem__`. Such repositories will only be created once per test
466 `__getitem__`. Such repositories will only be created once per test
467 session.
467 session.
468 """
468 """
469
469
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 _master_repo = None
471 _master_repo = None
472 _master_repo_path = ''
472 _master_repo_path = ''
473 _commit_ids = {}
473 _commit_ids = {}
474
474
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 self.alias = alias
476 self.alias = alias
477 self.repo_name = repo_name
477 self.repo_name = repo_name
478 self._cleanup_repos = []
478 self._cleanup_repos = []
479 self._test_name = test_name
479 self._test_name = test_name
480 self._test_repo_container = test_repo_container
480 self._test_repo_container = test_repo_container
481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
482 # Fixture will survive in the end.
482 # Fixture will survive in the end.
483 self._fixture = Fixture()
483 self._fixture = Fixture()
484
484
485 def __getitem__(self, key):
485 def __getitem__(self, key):
486 return self._test_repo_container(key, self.alias)
486 return self._test_repo_container(key, self.alias)
487
487
488 def create_test_repo(self, key, config=None):
488 def create_test_repo(self, key, config=None):
489 return self._test_repo_container(key, self.alias, config)
489 return self._test_repo_container(key, self.alias, config)
490
490
491 @property
491 @property
492 def repo_id(self):
493 # just fake some repo_id
494 return self.repo.repo_id
495
496 @property
492 def repo(self):
497 def repo(self):
493 """
498 """
494 Returns the "current" repository. This is the vcs_test repo or the
499 Returns the "current" repository. This is the vcs_test repo or the
495 last repo which has been created with `create_repo`.
500 last repo which has been created with `create_repo`.
496 """
501 """
497 from rhodecode.model.db import Repository
502 from rhodecode.model.db import Repository
498 return Repository.get_by_repo_name(self.repo_name)
503 return Repository.get_by_repo_name(self.repo_name)
499
504
500 @property
505 @property
501 def default_branch_name(self):
506 def default_branch_name(self):
502 VcsRepository = get_backend(self.alias)
507 VcsRepository = get_backend(self.alias)
503 return VcsRepository.DEFAULT_BRANCH_NAME
508 return VcsRepository.DEFAULT_BRANCH_NAME
504
509
505 @property
510 @property
506 def default_head_id(self):
511 def default_head_id(self):
507 """
512 """
508 Returns the default head id of the underlying backend.
513 Returns the default head id of the underlying backend.
509
514
510 This will be the default branch name in case the backend does have a
515 This will be the default branch name in case the backend does have a
511 default branch. In the other cases it will point to a valid head
516 default branch. In the other cases it will point to a valid head
512 which can serve as the base to create a new commit on top of it.
517 which can serve as the base to create a new commit on top of it.
513 """
518 """
514 vcsrepo = self.repo.scm_instance()
519 vcsrepo = self.repo.scm_instance()
515 head_id = (
520 head_id = (
516 vcsrepo.DEFAULT_BRANCH_NAME or
521 vcsrepo.DEFAULT_BRANCH_NAME or
517 vcsrepo.commit_ids[-1])
522 vcsrepo.commit_ids[-1])
518 return head_id
523 return head_id
519
524
520 @property
525 @property
521 def commit_ids(self):
526 def commit_ids(self):
522 """
527 """
523 Returns the list of commits for the last created repository
528 Returns the list of commits for the last created repository
524 """
529 """
525 return self._commit_ids
530 return self._commit_ids
526
531
527 def create_master_repo(self, commits):
532 def create_master_repo(self, commits):
528 """
533 """
529 Create a repository and remember it as a template.
534 Create a repository and remember it as a template.
530
535
531 This makes it easy to create derived repositories to construct
536 This makes it easy to create derived repositories to construct
532 more complex scenarios for diff, compare and pull requests.
537 more complex scenarios for diff, compare and pull requests.
533
538
534 Returns a commit map which maps from commit message to raw_id.
539 Returns a commit map which maps from commit message to raw_id.
535 """
540 """
536 self._master_repo = self.create_repo(commits=commits)
541 self._master_repo = self.create_repo(commits=commits)
537 self._master_repo_path = self._master_repo.repo_full_path
542 self._master_repo_path = self._master_repo.repo_full_path
538
543
539 return self._commit_ids
544 return self._commit_ids
540
545
541 def create_repo(
546 def create_repo(
542 self, commits=None, number_of_commits=0, heads=None,
547 self, commits=None, number_of_commits=0, heads=None,
543 name_suffix='', bare=False, **kwargs):
548 name_suffix='', bare=False, **kwargs):
544 """
549 """
545 Create a repository and record it for later cleanup.
550 Create a repository and record it for later cleanup.
546
551
547 :param commits: Optional. A sequence of dict instances.
552 :param commits: Optional. A sequence of dict instances.
548 Will add a commit per entry to the new repository.
553 Will add a commit per entry to the new repository.
549 :param number_of_commits: Optional. If set to a number, this number of
554 :param number_of_commits: Optional. If set to a number, this number of
550 commits will be added to the new repository.
555 commits will be added to the new repository.
551 :param heads: Optional. Can be set to a sequence of commit
556 :param heads: Optional. Can be set to a sequence of commit
552 names which shall be pulled in from the master repository.
557 names which shall be pulled in from the master repository.
553 :param name_suffix: adds special suffix to generated repo name
558 :param name_suffix: adds special suffix to generated repo name
554 :param bare: set a repo as bare (no checkout)
559 :param bare: set a repo as bare (no checkout)
555 """
560 """
556 self.repo_name = self._next_repo_name() + name_suffix
561 self.repo_name = self._next_repo_name() + name_suffix
557 repo = self._fixture.create_repo(
562 repo = self._fixture.create_repo(
558 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
559 self._cleanup_repos.append(repo.repo_name)
564 self._cleanup_repos.append(repo.repo_name)
560
565
561 commits = commits or [
566 commits = commits or [
562 {'message': 'Commit %s of %s' % (x, self.repo_name)}
567 {'message': 'Commit %s of %s' % (x, self.repo_name)}
563 for x in range(number_of_commits)]
568 for x in range(number_of_commits)]
564 vcs_repo = repo.scm_instance()
569 vcs_repo = repo.scm_instance()
565 vcs_repo.count()
570 vcs_repo.count()
566 self._add_commits_to_repo(vcs_repo, commits)
571 self._add_commits_to_repo(vcs_repo, commits)
567 if heads:
572 if heads:
568 self.pull_heads(repo, heads)
573 self.pull_heads(repo, heads)
569
574
570 return repo
575 return repo
571
576
572 def pull_heads(self, repo, heads):
577 def pull_heads(self, repo, heads):
573 """
578 """
574 Make sure that repo contains all commits mentioned in `heads`
579 Make sure that repo contains all commits mentioned in `heads`
575 """
580 """
576 vcsrepo = repo.scm_instance()
581 vcsrepo = repo.scm_instance()
577 vcsrepo.config.clear_section('hooks')
582 vcsrepo.config.clear_section('hooks')
578 commit_ids = [self._commit_ids[h] for h in heads]
583 commit_ids = [self._commit_ids[h] for h in heads]
579 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
584 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
580
585
581 def create_fork(self):
586 def create_fork(self):
582 repo_to_fork = self.repo_name
587 repo_to_fork = self.repo_name
583 self.repo_name = self._next_repo_name()
588 self.repo_name = self._next_repo_name()
584 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
589 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
585 self._cleanup_repos.append(self.repo_name)
590 self._cleanup_repos.append(self.repo_name)
586 return repo
591 return repo
587
592
588 def new_repo_name(self, suffix=''):
593 def new_repo_name(self, suffix=''):
589 self.repo_name = self._next_repo_name() + suffix
594 self.repo_name = self._next_repo_name() + suffix
590 self._cleanup_repos.append(self.repo_name)
595 self._cleanup_repos.append(self.repo_name)
591 return self.repo_name
596 return self.repo_name
592
597
593 def _next_repo_name(self):
598 def _next_repo_name(self):
594 return u"%s_%s" % (
599 return u"%s_%s" % (
595 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
600 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
596
601
597 def ensure_file(self, filename, content='Test content\n'):
602 def ensure_file(self, filename, content='Test content\n'):
598 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
603 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
599 commits = [
604 commits = [
600 {'added': [
605 {'added': [
601 FileNode(filename, content=content),
606 FileNode(filename, content=content),
602 ]},
607 ]},
603 ]
608 ]
604 self._add_commits_to_repo(self.repo.scm_instance(), commits)
609 self._add_commits_to_repo(self.repo.scm_instance(), commits)
605
610
606 def enable_downloads(self):
611 def enable_downloads(self):
607 repo = self.repo
612 repo = self.repo
608 repo.enable_downloads = True
613 repo.enable_downloads = True
609 Session().add(repo)
614 Session().add(repo)
610 Session().commit()
615 Session().commit()
611
616
612 def cleanup(self):
617 def cleanup(self):
613 for repo_name in reversed(self._cleanup_repos):
618 for repo_name in reversed(self._cleanup_repos):
614 self._fixture.destroy_repo(repo_name)
619 self._fixture.destroy_repo(repo_name)
615
620
616 def _add_commits_to_repo(self, repo, commits):
621 def _add_commits_to_repo(self, repo, commits):
617 commit_ids = _add_commits_to_repo(repo, commits)
622 commit_ids = _add_commits_to_repo(repo, commits)
618 if not commit_ids:
623 if not commit_ids:
619 return
624 return
620 self._commit_ids = commit_ids
625 self._commit_ids = commit_ids
621
626
622 # Creating refs for Git to allow fetching them from remote repository
627 # Creating refs for Git to allow fetching them from remote repository
623 if self.alias == 'git':
628 if self.alias == 'git':
624 refs = {}
629 refs = {}
625 for message in self._commit_ids:
630 for message in self._commit_ids:
626 # TODO: mikhail: do more special chars replacements
631 # TODO: mikhail: do more special chars replacements
627 ref_name = 'refs/test-refs/{}'.format(
632 ref_name = 'refs/test-refs/{}'.format(
628 message.replace(' ', ''))
633 message.replace(' ', ''))
629 refs[ref_name] = self._commit_ids[message]
634 refs[ref_name] = self._commit_ids[message]
630 self._create_refs(repo, refs)
635 self._create_refs(repo, refs)
631
636
632 def _create_refs(self, repo, refs):
637 def _create_refs(self, repo, refs):
633 for ref_name in refs:
638 for ref_name in refs:
634 repo.set_refs(ref_name, refs[ref_name])
639 repo.set_refs(ref_name, refs[ref_name])
635
640
636
641
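The master-repo workflow of `Backend` is easiest to see end to end. A sketch mirroring how `PRTestUtility.create_pull_request` below uses it; commit dicts are keyed by message, and `heads` picks which of those commits a derived repository pulls in:

def test_master_and_derived_repos(backend):
    commits = [{'message': 'c1'}, {'message': 'c2'}, {'message': 'c3'}]
    commit_map = backend.create_master_repo(commits)  # message -> raw_id
    target = backend.create_repo(heads=['c1'])  # contains c1 only
    source = backend.create_repo(heads=['c2'])  # contains c1 and c2
    assert commit_map['c1'] != commit_map['c2']
    assert target.repo_name != source.repo_name
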
637 class VcsBackend(object):
642 class VcsBackend(object):
638 """
643 """
639 Represents the test configuration for one supported vcs backend.
644 Represents the test configuration for one supported vcs backend.
640 """
645 """
641
646
642 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
647 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
643
648
644 def __init__(self, alias, repo_path, test_name, test_repo_container):
649 def __init__(self, alias, repo_path, test_name, test_repo_container):
645 self.alias = alias
650 self.alias = alias
646 self._repo_path = repo_path
651 self._repo_path = repo_path
647 self._cleanup_repos = []
652 self._cleanup_repos = []
648 self._test_name = test_name
653 self._test_name = test_name
649 self._test_repo_container = test_repo_container
654 self._test_repo_container = test_repo_container
650
655
651 def __getitem__(self, key):
656 def __getitem__(self, key):
652 return self._test_repo_container(key, self.alias).scm_instance()
657 return self._test_repo_container(key, self.alias).scm_instance()
653
658
654 def __repr__(self):
659 def __repr__(self):
655 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
660 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
656
661
657 @property
662 @property
658 def repo(self):
663 def repo(self):
659 """
664 """
660 Returns the "current" repository. This is the vcs_test repo or the last
665 Returns the "current" repository. This is the vcs_test repo or the last
661 repo which has been created.
666 repo which has been created.
662 """
667 """
663 Repository = get_backend(self.alias)
668 Repository = get_backend(self.alias)
664 return Repository(self._repo_path)
669 return Repository(self._repo_path)
665
670
666 @property
671 @property
667 def backend(self):
672 def backend(self):
668 """
673 """
669 Returns the backend implementation class.
674 Returns the backend implementation class.
670 """
675 """
671 return get_backend(self.alias)
676 return get_backend(self.alias)
672
677
673 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
678 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
674 bare=False):
679 bare=False):
675 repo_name = self._next_repo_name()
680 repo_name = self._next_repo_name()
676 self._repo_path = get_new_dir(repo_name)
681 self._repo_path = get_new_dir(repo_name)
677 repo_class = get_backend(self.alias)
682 repo_class = get_backend(self.alias)
678 src_url = None
683 src_url = None
679 if _clone_repo:
684 if _clone_repo:
680 src_url = _clone_repo.path
685 src_url = _clone_repo.path
681 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
686 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
682 self._cleanup_repos.append(repo)
687 self._cleanup_repos.append(repo)
683
688
684 commits = commits or [
689 commits = commits or [
685 {'message': 'Commit %s of %s' % (x, repo_name)}
690 {'message': 'Commit %s of %s' % (x, repo_name)}
686 for x in range(number_of_commits)]
691 for x in range(number_of_commits)]
687 _add_commits_to_repo(repo, commits)
692 _add_commits_to_repo(repo, commits)
688 return repo
693 return repo
689
694
690 def clone_repo(self, repo):
695 def clone_repo(self, repo):
691 return self.create_repo(_clone_repo=repo)
696 return self.create_repo(_clone_repo=repo)
692
697
693 def cleanup(self):
698 def cleanup(self):
694 for repo in self._cleanup_repos:
699 for repo in self._cleanup_repos:
695 shutil.rmtree(repo.path)
700 shutil.rmtree(repo.path)
696
701
697 def new_repo_path(self):
702 def new_repo_path(self):
698 repo_name = self._next_repo_name()
703 repo_name = self._next_repo_name()
699 self._repo_path = get_new_dir(repo_name)
704 self._repo_path = get_new_dir(repo_name)
700 return self._repo_path
705 return self._repo_path
701
706
702 def _next_repo_name(self):
707 def _next_repo_name(self):
703
708
704 return "{}_{}".format(
709 return "{}_{}".format(
705 self.invalid_repo_name.sub('_', self._test_name),
710 self.invalid_repo_name.sub('_', self._test_name),
706 len(self._cleanup_repos)
711 len(self._cleanup_repos)
707 )
712 )
708
713
709 def add_file(self, repo, filename, content='Test content\n'):
714 def add_file(self, repo, filename, content='Test content\n'):
710 imc = repo.in_memory_commit
715 imc = repo.in_memory_commit
711 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
716 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
712 imc.commit(
717 imc.commit(
713 message='Automatic commit from vcsbackend fixture',
718 message='Automatic commit from vcsbackend fixture',
714 author='Automatic <automatic@rhodecode.com>')
719 author='Automatic <automatic@rhodecode.com>')
715
720
716 def ensure_file(self, filename, content='Test content\n'):
721 def ensure_file(self, filename, content='Test content\n'):
717 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
722 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
718 self.add_file(self.repo, filename, content)
723 self.add_file(self.repo, filename, content)
719
724
720
725
721 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
726 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
722 if backend_alias not in request.config.getoption('--backends'):
727 if backend_alias not in request.config.getoption('--backends'):
723 pytest.skip("Backend %s not selected." % (backend_alias, ))
728 pytest.skip("Backend %s not selected." % (backend_alias, ))
724
729
725 utils.check_xfail_backends(request.node, backend_alias)
730 utils.check_xfail_backends(request.node, backend_alias)
726 utils.check_skip_backends(request.node, backend_alias)
731 utils.check_skip_backends(request.node, backend_alias)
727
732
728 repo_name = f'vcs_test_{backend_alias}'
733 repo_name = f'vcs_test_{backend_alias}'
729 repo_path = os.path.join(tests_tmp_path, repo_name)
734 repo_path = os.path.join(tests_tmp_path, repo_name)
730 backend = VcsBackend(
735 backend = VcsBackend(
731 alias=backend_alias,
736 alias=backend_alias,
732 repo_path=repo_path,
737 repo_path=repo_path,
733 test_name=request.node.name,
738 test_name=request.node.name,
734 test_repo_container=test_repo)
739 test_repo_container=test_repo)
735 request.addfinalizer(backend.cleanup)
740 request.addfinalizer(backend.cleanup)
736 return backend
741 return backend
737
742
738
743
739 @pytest.fixture()
744 @pytest.fixture()
740 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
745 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
741 """
746 """
742 Parametrized fixture which represents a single vcs backend implementation.
747 Parametrized fixture which represents a single vcs backend implementation.
743
748
744 See the fixture `backend` for more details. This one implements the same
749 See the fixture `backend` for more details. This one implements the same
745 concept, but on vcs level. So it does not provide model instances etc.
750 concept, but on vcs level. So it does not provide model instances etc.
746
751
747 Parameters are generated dynamically, see :func:`pytest_generate_tests`
752 Parameters are generated dynamically, see :func:`pytest_generate_tests`
748 for how this works.
753 for how this works.
749 """
754 """
750 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
755 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
751
756
752
757
753 @pytest.fixture()
758 @pytest.fixture()
754 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
759 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
755 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
760 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
756
761
757
762
758 @pytest.fixture()
763 @pytest.fixture()
759 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
764 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
760 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
765 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
761
766
762
767
763 @pytest.fixture()
768 @pytest.fixture()
764 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
769 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
765 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
770 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
766
771
767
772
768 @pytest.fixture()
773 @pytest.fixture()
769 def vcsbackend_stub(vcsbackend_git):
774 def vcsbackend_stub(vcsbackend_git):
770 """
775 """
771 Use this to express that your test just needs a stub of a vcsbackend.
776 Use this to express that your test just needs a stub of a vcsbackend.
772
777
773 Plan is to eventually implement an in-memory stub to speed tests up.
778 Plan is to eventually implement an in-memory stub to speed tests up.
774 """
779 """
775 return vcsbackend_git
780 return vcsbackend_git
776
781
777
782
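A short sketch of the vcs-level fixtures, using only the `VcsBackend` API defined above; no model or database objects are involved, and the file name and content are illustrative:

def test_vcs_repo_gets_a_file(vcsbackend):
    vcs_repo = vcsbackend.create_repo(number_of_commits=1)
    # ensure_file commits through the in-memory commit of the current repository
    vcsbackend.ensure_file('README.rst', content='illustrative content\n')
    assert vcs_repo.commit_ids
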
778 def _add_commits_to_repo(vcs_repo, commits):
783 def _add_commits_to_repo(vcs_repo, commits):
779 commit_ids = {}
784 commit_ids = {}
780 if not commits:
785 if not commits:
781 return commit_ids
786 return commit_ids
782
787
783 imc = vcs_repo.in_memory_commit
788 imc = vcs_repo.in_memory_commit
784 commit = None
789 commit = None
785
790
786 for idx, commit in enumerate(commits):
791 for idx, commit in enumerate(commits):
787 message = str(commit.get('message', 'Commit %s' % idx))
792 message = str(commit.get('message', 'Commit %s' % idx))
788
793
789 for node in commit.get('added', []):
794 for node in commit.get('added', []):
790 imc.add(FileNode(safe_bytes(node.path), content=node.content))
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
791 for node in commit.get('changed', []):
796 for node in commit.get('changed', []):
792 imc.change(FileNode(safe_bytes(node.path), content=node.content))
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
793 for node in commit.get('removed', []):
798 for node in commit.get('removed', []):
794 imc.remove(FileNode(safe_bytes(node.path)))
799 imc.remove(FileNode(safe_bytes(node.path)))
795
800
796 parents = [
801 parents = [
797 vcs_repo.get_commit(commit_id=commit_ids[p])
802 vcs_repo.get_commit(commit_id=commit_ids[p])
798 for p in commit.get('parents', [])]
803 for p in commit.get('parents', [])]
799
804
800 operations = ('added', 'changed', 'removed')
805 operations = ('added', 'changed', 'removed')
801 if not any((commit.get(o) for o in operations)):
806 if not any((commit.get(o) for o in operations)):
802 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
803
808
804 commit = imc.commit(
809 commit = imc.commit(
805 message=message,
810 message=message,
806 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
807 date=commit.get('date'),
812 date=commit.get('date'),
808 branch=commit.get('branch'),
813 branch=commit.get('branch'),
809 parents=parents)
814 parents=parents)
810
815
811 commit_ids[commit.message] = commit.raw_id
816 commit_ids[commit.message] = commit.raw_id
812
817
813 return commit_ids
818 return commit_ids
814
819
815
820
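The commit specification consumed by `_add_commits_to_repo` (and therefore by `Backend.create_repo` and `VcsBackend.create_repo`) is a list of plain dicts. A hedged example of the supported keys, with file names and messages that are purely illustrative; `FileNode` comes from `rhodecode.lib.vcs.nodes`, as used elsewhere in this file:

from rhodecode.lib.vcs.nodes import FileNode

commits_spec = [
    {'message': 'add readme',
     'added': [FileNode(b'README.rst', content=b'docs\n')]},
    {'message': 'change readme',
     # 'parents' entries refer to the messages of earlier commits
     'parents': ['add readme'],
     'changed': [FileNode(b'README.rst', content=b'better docs\n')]},
    {'message': 'drop readme',
     'removed': [FileNode(b'README.rst')]},
]
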
816 @pytest.fixture()
821 @pytest.fixture()
817 def reposerver(request):
822 def reposerver(request):
818 """
823 """
819 Allows serving a backend repository
824 Allows serving a backend repository
820 """
825 """
821
826
822 repo_server = RepoServer()
827 repo_server = RepoServer()
823 request.addfinalizer(repo_server.cleanup)
828 request.addfinalizer(repo_server.cleanup)
824 return repo_server
829 return repo_server
825
830
826
831
827 class RepoServer(object):
832 class RepoServer(object):
828 """
833 """
829 Utility to serve a local repository for the duration of a test case.
834 Utility to serve a local repository for the duration of a test case.
830
835
831 Supports only Subversion so far.
836 Supports only Subversion so far.
832 """
837 """
833
838
834 url = None
839 url = None
835
840
836 def __init__(self):
841 def __init__(self):
837 self._cleanup_servers = []
842 self._cleanup_servers = []
838
843
839 def serve(self, vcsrepo):
844 def serve(self, vcsrepo):
840 if vcsrepo.alias != 'svn':
845 if vcsrepo.alias != 'svn':
841 raise TypeError("Backend %s not supported" % vcsrepo.alias)
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
842
847
843 proc = subprocess.Popen(
848 proc = subprocess.Popen(
844 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
845 '--root', vcsrepo.path])
850 '--root', vcsrepo.path])
846 self._cleanup_servers.append(proc)
851 self._cleanup_servers.append(proc)
847 self.url = 'svn://localhost'
852 self.url = 'svn://localhost'
848
853
849 def cleanup(self):
854 def cleanup(self):
850 for proc in self._cleanup_servers:
855 for proc in self._cleanup_servers:
851 proc.terminate()
856 proc.terminate()
852
857
853
858
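A sketch of serving a Subversion repository during a test, combining the `reposerver` fixture above with `vcsbackend_svn`; `serve` rejects any non-svn repository, and the URL is fixed by the class:

def test_serve_svn_repo(reposerver, vcsbackend_svn):
    vcs_repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcs_repo)  # starts svnserve rooted at vcs_repo.path
    assert reposerver.url == 'svn://localhost'
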
854 @pytest.fixture()
859 @pytest.fixture()
855 def pr_util(backend, request, config_stub):
860 def pr_util(backend, request, config_stub):
856 """
861 """
857 Utility for tests of models and for functional tests around pull requests.
862 Utility for tests of models and for functional tests around pull requests.
858
863
859 It gives an instance of :class:`PRTestUtility` which provides various
864 It gives an instance of :class:`PRTestUtility` which provides various
860 utility methods around one pull request.
865 utility methods around one pull request.
861
866
862 This fixture uses `backend` and inherits its parameterization.
867 This fixture uses `backend` and inherits its parameterization.
863 """
868 """
864
869
865 util = PRTestUtility(backend)
870 util = PRTestUtility(backend)
866 request.addfinalizer(util.cleanup)
871 request.addfinalizer(util.cleanup)
867
872
868 return util
873 return util
869
874
870
875
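A usage sketch for `pr_util`, relying only on `PRTestUtility` methods shown below; the default commits c1..c3 and the single-revision pull request come from `create_pull_request` itself:

def test_pull_request_gains_a_commit(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.revisions == [pr_util.commit_ids['c2']]
    new_commit = pr_util.add_one_commit()  # pulls 'c3' and updates the PR
    assert new_commit == pr_util.commit_ids['c3']
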
871 class PRTestUtility(object):
876 class PRTestUtility(object):
872
877
873 pull_request = None
878 pull_request = None
874 pull_request_id = None
879 pull_request_id = None
875 mergeable_patcher = None
880 mergeable_patcher = None
876 mergeable_mock = None
881 mergeable_mock = None
877 notification_patcher = None
882 notification_patcher = None
878
883
879 def __init__(self, backend):
884 def __init__(self, backend):
880 self.backend = backend
885 self.backend = backend
881
886
882 def create_pull_request(
887 def create_pull_request(
883 self, commits=None, target_head=None, source_head=None,
888 self, commits=None, target_head=None, source_head=None,
884 revisions=None, approved=False, author=None, mergeable=False,
889 revisions=None, approved=False, author=None, mergeable=False,
885 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
890 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
886 title=u"Test", description=u"Description"):
891 title=u"Test", description=u"Description"):
887 self.set_mergeable(mergeable)
892 self.set_mergeable(mergeable)
888 if not enable_notifications:
893 if not enable_notifications:
889 # mock notification side effect
894 # mock notification side effect
890 self.notification_patcher = mock.patch(
895 self.notification_patcher = mock.patch(
891 'rhodecode.model.notification.NotificationModel.create')
896 'rhodecode.model.notification.NotificationModel.create')
892 self.notification_patcher.start()
897 self.notification_patcher.start()
893
898
894 if not self.pull_request:
899 if not self.pull_request:
895 if not commits:
900 if not commits:
896 commits = [
901 commits = [
897 {'message': 'c1'},
902 {'message': 'c1'},
898 {'message': 'c2'},
903 {'message': 'c2'},
899 {'message': 'c3'},
904 {'message': 'c3'},
900 ]
905 ]
901 target_head = 'c1'
906 target_head = 'c1'
902 source_head = 'c2'
907 source_head = 'c2'
903 revisions = ['c2']
908 revisions = ['c2']
904
909
905 self.commit_ids = self.backend.create_master_repo(commits)
910 self.commit_ids = self.backend.create_master_repo(commits)
906 self.target_repository = self.backend.create_repo(
911 self.target_repository = self.backend.create_repo(
907 heads=[target_head], name_suffix=name_suffix)
912 heads=[target_head], name_suffix=name_suffix)
908 self.source_repository = self.backend.create_repo(
913 self.source_repository = self.backend.create_repo(
909 heads=[source_head], name_suffix=name_suffix)
914 heads=[source_head], name_suffix=name_suffix)
910 self.author = author or UserModel().get_by_username(
915 self.author = author or UserModel().get_by_username(
911 TEST_USER_ADMIN_LOGIN)
916 TEST_USER_ADMIN_LOGIN)
912
917
913 model = PullRequestModel()
918 model = PullRequestModel()
914 self.create_parameters = {
919 self.create_parameters = {
915 'created_by': self.author,
920 'created_by': self.author,
916 'source_repo': self.source_repository.repo_name,
921 'source_repo': self.source_repository.repo_name,
917 'source_ref': self._default_branch_reference(source_head),
922 'source_ref': self._default_branch_reference(source_head),
918 'target_repo': self.target_repository.repo_name,
923 'target_repo': self.target_repository.repo_name,
919 'target_ref': self._default_branch_reference(target_head),
924 'target_ref': self._default_branch_reference(target_head),
920 'revisions': [self.commit_ids[r] for r in revisions],
925 'revisions': [self.commit_ids[r] for r in revisions],
921 'reviewers': reviewers or self._get_reviewers(),
926 'reviewers': reviewers or self._get_reviewers(),
922 'observers': observers or self._get_observers(),
927 'observers': observers or self._get_observers(),
923 'title': title,
928 'title': title,
924 'description': description,
929 'description': description,
925 }
930 }
926 self.pull_request = model.create(**self.create_parameters)
931 self.pull_request = model.create(**self.create_parameters)
927 assert model.get_versions(self.pull_request) == []
932 assert model.get_versions(self.pull_request) == []
928
933
929 self.pull_request_id = self.pull_request.pull_request_id
934 self.pull_request_id = self.pull_request.pull_request_id
930
935
931 if approved:
936 if approved:
932 self.approve()
937 self.approve()
933
938
934 Session().add(self.pull_request)
939 Session().add(self.pull_request)
935 Session().commit()
940 Session().commit()
936
941
937 return self.pull_request
942 return self.pull_request
938
943
939 def approve(self):
944 def approve(self):
940 self.create_status_votes(
945 self.create_status_votes(
941 ChangesetStatus.STATUS_APPROVED,
946 ChangesetStatus.STATUS_APPROVED,
942 *self.pull_request.reviewers)
947 *self.pull_request.reviewers)
943
948
944 def close(self):
949 def close(self):
945 PullRequestModel().close_pull_request(self.pull_request, self.author)
950 PullRequestModel().close_pull_request(self.pull_request, self.author)
946
951
947 def _default_branch_reference(self, commit_message):
952 def _default_branch_reference(self, commit_message):
948 reference = '%s:%s:%s' % (
953 reference = '%s:%s:%s' % (
949 'branch',
954 'branch',
950 self.backend.default_branch_name,
955 self.backend.default_branch_name,
951 self.commit_ids[commit_message])
956 self.commit_ids[commit_message])
952 return reference
957 return reference
953
958
954 def _get_reviewers(self):
959 def _get_reviewers(self):
955 role = PullRequestReviewers.ROLE_REVIEWER
960 role = PullRequestReviewers.ROLE_REVIEWER
956 return [
961 return [
957 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
962 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
958 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
963 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
959 ]
964 ]
960
965
961 def _get_observers(self):
966 def _get_observers(self):
962 return [
967 return [
963
968
964 ]
969 ]
965
970
966 def update_source_repository(self, head=None):
971 def update_source_repository(self, head=None):
967 heads = [head or 'c3']
972 heads = [head or 'c3']
968 self.backend.pull_heads(self.source_repository, heads=heads)
973 self.backend.pull_heads(self.source_repository, heads=heads)
969
974
970 def add_one_commit(self, head=None):
975 def add_one_commit(self, head=None):
971 self.update_source_repository(head=head)
976 self.update_source_repository(head=head)
972 old_commit_ids = set(self.pull_request.revisions)
977 old_commit_ids = set(self.pull_request.revisions)
973 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
978 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
974 commit_ids = set(self.pull_request.revisions)
979 commit_ids = set(self.pull_request.revisions)
975 new_commit_ids = commit_ids - old_commit_ids
980 new_commit_ids = commit_ids - old_commit_ids
976 assert len(new_commit_ids) == 1
981 assert len(new_commit_ids) == 1
977 return new_commit_ids.pop()
982 return new_commit_ids.pop()
978
983
979 def remove_one_commit(self):
984 def remove_one_commit(self):
980 assert len(self.pull_request.revisions) == 2
985 assert len(self.pull_request.revisions) == 2
981 source_vcs = self.source_repository.scm_instance()
986 source_vcs = self.source_repository.scm_instance()
982 removed_commit_id = source_vcs.commit_ids[-1]
987 removed_commit_id = source_vcs.commit_ids[-1]
983
988
984 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
989 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
985 # remove the if once that's sorted out.
990 # remove the if once that's sorted out.
986 if self.backend.alias == "git":
991 if self.backend.alias == "git":
987 kwargs = {'branch_name': self.backend.default_branch_name}
992 kwargs = {'branch_name': self.backend.default_branch_name}
988 else:
993 else:
989 kwargs = {}
994 kwargs = {}
990 source_vcs.strip(removed_commit_id, **kwargs)
995 source_vcs.strip(removed_commit_id, **kwargs)
991
996
992 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
997 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
993 assert len(self.pull_request.revisions) == 1
998 assert len(self.pull_request.revisions) == 1
994 return removed_commit_id
999 return removed_commit_id
995
1000
996 def create_comment(self, linked_to=None):
1001 def create_comment(self, linked_to=None):
997 comment = CommentsModel().create(
1002 comment = CommentsModel().create(
998 text=u"Test comment",
1003 text=u"Test comment",
999 repo=self.target_repository.repo_name,
1004 repo=self.target_repository.repo_name,
1000 user=self.author,
1005 user=self.author,
1001 pull_request=self.pull_request)
1006 pull_request=self.pull_request)
1002 assert comment.pull_request_version_id is None
1007 assert comment.pull_request_version_id is None
1003
1008
1004 if linked_to:
1009 if linked_to:
1005 PullRequestModel()._link_comments_to_version(linked_to)
1010 PullRequestModel()._link_comments_to_version(linked_to)
1006
1011
1007 return comment
1012 return comment
1008
1013
1009 def create_inline_comment(
1014 def create_inline_comment(
1010 self, linked_to=None, line_no='n1', file_path='file_1'):
1015 self, linked_to=None, line_no='n1', file_path='file_1'):
1011 comment = CommentsModel().create(
1016 comment = CommentsModel().create(
1012 text=u"Test comment",
1017 text=u"Test comment",
1013 repo=self.target_repository.repo_name,
1018 repo=self.target_repository.repo_name,
1014 user=self.author,
1019 user=self.author,
1015 line_no=line_no,
1020 line_no=line_no,
1016 f_path=file_path,
1021 f_path=file_path,
1017 pull_request=self.pull_request)
1022 pull_request=self.pull_request)
1018 assert comment.pull_request_version_id is None
1023 assert comment.pull_request_version_id is None
1019
1024
1020 if linked_to:
1025 if linked_to:
1021 PullRequestModel()._link_comments_to_version(linked_to)
1026 PullRequestModel()._link_comments_to_version(linked_to)
1022
1027
1023 return comment
1028 return comment
1024
1029
1025 def create_version_of_pull_request(self):
1030 def create_version_of_pull_request(self):
1026 pull_request = self.create_pull_request()
1031 pull_request = self.create_pull_request()
1027 version = PullRequestModel()._create_version_from_snapshot(
1032 version = PullRequestModel()._create_version_from_snapshot(
1028 pull_request)
1033 pull_request)
1029 return version
1034 return version
1030
1035
1031 def create_status_votes(self, status, *reviewers):
1036 def create_status_votes(self, status, *reviewers):
1032 for reviewer in reviewers:
1037 for reviewer in reviewers:
1033 ChangesetStatusModel().set_status(
1038 ChangesetStatusModel().set_status(
1034 repo=self.pull_request.target_repo,
1039 repo=self.pull_request.target_repo,
1035 status=status,
1040 status=status,
1036 user=reviewer.user_id,
1041 user=reviewer.user_id,
1037 pull_request=self.pull_request)
1042 pull_request=self.pull_request)
1038
1043
1039 def set_mergeable(self, value):
1044 def set_mergeable(self, value):
1040 if not self.mergeable_patcher:
1045 if not self.mergeable_patcher:
1041 self.mergeable_patcher = mock.patch.object(
1046 self.mergeable_patcher = mock.patch.object(
1042 VcsSettingsModel, 'get_general_settings')
1047 VcsSettingsModel, 'get_general_settings')
1043 self.mergeable_mock = self.mergeable_patcher.start()
1048 self.mergeable_mock = self.mergeable_patcher.start()
1044 self.mergeable_mock.return_value = {
1049 self.mergeable_mock.return_value = {
1045 'rhodecode_pr_merge_enabled': value}
1050 'rhodecode_pr_merge_enabled': value}
1046
1051
1047 def cleanup(self):
1052 def cleanup(self):
1048 # In case the source repository is already cleaned up, the pull
1053 # In case the source repository is already cleaned up, the pull
1049 # request will already be deleted.
1054 # request will already be deleted.
1050 pull_request = PullRequest().get(self.pull_request_id)
1055 pull_request = PullRequest().get(self.pull_request_id)
1051 if pull_request:
1056 if pull_request:
1052 PullRequestModel().delete(pull_request, pull_request.author)
1057 PullRequestModel().delete(pull_request, pull_request.author)
1053 Session().commit()
1058 Session().commit()
1054
1059
1055 if self.notification_patcher:
1060 if self.notification_patcher:
1056 self.notification_patcher.stop()
1061 self.notification_patcher.stop()
1057
1062
1058 if self.mergeable_patcher:
1063 if self.mergeable_patcher:
1059 self.mergeable_patcher.stop()
1064 self.mergeable_patcher.stop()
1060
1065
1061
1066
1062 @pytest.fixture()
1067 @pytest.fixture()
1063 def user_admin(baseapp):
1068 def user_admin(baseapp):
1064 """
1069 """
1065 Provides the default admin test user as an instance of `db.User`.
1070 Provides the default admin test user as an instance of `db.User`.
1066 """
1071 """
1067 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1072 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1068 return user
1073 return user
1069
1074
1070
1075
1071 @pytest.fixture()
1076 @pytest.fixture()
1072 def user_regular(baseapp):
1077 def user_regular(baseapp):
1073 """
1078 """
1074 Provides the default regular test user as an instance of `db.User`.
1079 Provides the default regular test user as an instance of `db.User`.
1075 """
1080 """
1076 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1081 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1077 return user
1082 return user
1078
1083
1079
1084
1080 @pytest.fixture()
1085 @pytest.fixture()
1081 def user_util(request, db_connection):
1086 def user_util(request, db_connection):
1082 """
1087 """
1083 Provides a wired instance of `UserUtility` with integrated cleanup.
1088 Provides a wired instance of `UserUtility` with integrated cleanup.
1084 """
1089 """
1085 utility = UserUtility(test_name=request.node.name)
1090 utility = UserUtility(test_name=request.node.name)
1086 request.addfinalizer(utility.cleanup)
1091 request.addfinalizer(utility.cleanup)
1087 return utility
1092 return utility
1088
1093
1089
1094
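A sketch of `user_util`, using only helpers defined on `UserUtility` below; every object created this way is torn down by the finalizer registered above. The permission name is an assumption based on RhodeCode's standard repo-group permission levels:

def test_user_reads_repo_group(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    # 'group.read' is assumed to be a valid repo-group permission name
    user_util.grant_user_permission_to_repo_group(repo_group, user, 'group.read')
    assert repo_group.group_id in user_util.repo_group_ids
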
1090 # TODO: johbo: Split this up into utilities per domain or something similar
1095 # TODO: johbo: Split this up into utilities per domain or something similar
1091 class UserUtility(object):
1096 class UserUtility(object):
1092
1097
1093 def __init__(self, test_name="test"):
1098 def __init__(self, test_name="test"):
1094 self._test_name = self._sanitize_name(test_name)
1099 self._test_name = self._sanitize_name(test_name)
1095 self.fixture = Fixture()
1100 self.fixture = Fixture()
1096 self.repo_group_ids = []
1101 self.repo_group_ids = []
1097 self.repos_ids = []
1102 self.repos_ids = []
1098 self.user_ids = []
1103 self.user_ids = []
1099 self.user_group_ids = []
1104 self.user_group_ids = []
1100 self.user_repo_permission_ids = []
1105 self.user_repo_permission_ids = []
1101 self.user_group_repo_permission_ids = []
1106 self.user_group_repo_permission_ids = []
1102 self.user_repo_group_permission_ids = []
1107 self.user_repo_group_permission_ids = []
1103 self.user_group_repo_group_permission_ids = []
1108 self.user_group_repo_group_permission_ids = []
1104 self.user_user_group_permission_ids = []
1109 self.user_user_group_permission_ids = []
1105 self.user_group_user_group_permission_ids = []
1110 self.user_group_user_group_permission_ids = []
1106 self.user_permissions = []
1111 self.user_permissions = []
1107
1112
1108 def _sanitize_name(self, name):
1113 def _sanitize_name(self, name):
1109 for char in ['[', ']']:
1114 for char in ['[', ']']:
1110 name = name.replace(char, '_')
1115 name = name.replace(char, '_')
1111 return name
1116 return name
1112
1117
1113 def create_repo_group(
1118 def create_repo_group(
1114 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1119 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1115 group_name = "{prefix}_repogroup_{count}".format(
1120 group_name = "{prefix}_repogroup_{count}".format(
1116 prefix=self._test_name,
1121 prefix=self._test_name,
1117 count=len(self.repo_group_ids))
1122 count=len(self.repo_group_ids))
1118 repo_group = self.fixture.create_repo_group(
1123 repo_group = self.fixture.create_repo_group(
1119 group_name, cur_user=owner)
1124 group_name, cur_user=owner)
1120 if auto_cleanup:
1125 if auto_cleanup:
1121 self.repo_group_ids.append(repo_group.group_id)
1126 self.repo_group_ids.append(repo_group.group_id)
1122 return repo_group
1127 return repo_group
1123
1128
1124 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1129 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1125 auto_cleanup=True, repo_type='hg', bare=False):
1130 auto_cleanup=True, repo_type='hg', bare=False):
1126 repo_name = "{prefix}_repository_{count}".format(
1131 repo_name = "{prefix}_repository_{count}".format(
1127 prefix=self._test_name,
1132 prefix=self._test_name,
1128 count=len(self.repos_ids))
1133 count=len(self.repos_ids))
1129
1134
1130 repository = self.fixture.create_repo(
1135 repository = self.fixture.create_repo(
1131 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1136 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1132 if auto_cleanup:
1137 if auto_cleanup:
1133 self.repos_ids.append(repository.repo_id)
1138 self.repos_ids.append(repository.repo_id)
1134 return repository
1139 return repository
1135
1140
1136 def create_user(self, auto_cleanup=True, **kwargs):
1141 def create_user(self, auto_cleanup=True, **kwargs):
1137 user_name = "{prefix}_user_{count}".format(
1142 user_name = "{prefix}_user_{count}".format(
1138 prefix=self._test_name,
1143 prefix=self._test_name,
1139 count=len(self.user_ids))
1144 count=len(self.user_ids))
1140 user = self.fixture.create_user(user_name, **kwargs)
1145 user = self.fixture.create_user(user_name, **kwargs)
1141 if auto_cleanup:
1146 if auto_cleanup:
1142 self.user_ids.append(user.user_id)
1147 self.user_ids.append(user.user_id)
1143 return user
1148 return user
1144
1149
1145 def create_additional_user_email(self, user, email):
1150 def create_additional_user_email(self, user, email):
1146 uem = self.fixture.create_additional_user_email(user=user, email=email)
1151 uem = self.fixture.create_additional_user_email(user=user, email=email)
1147 return uem
1152 return uem
1148
1153
1149 def create_user_with_group(self):
1154 def create_user_with_group(self):
1150 user = self.create_user()
1155 user = self.create_user()
1151 user_group = self.create_user_group(members=[user])
1156 user_group = self.create_user_group(members=[user])
1152 return user, user_group
1157 return user, user_group
1153
1158
1154 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1159 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1155 auto_cleanup=True, **kwargs):
1160 auto_cleanup=True, **kwargs):
1156 group_name = "{prefix}_usergroup_{count}".format(
1161 group_name = "{prefix}_usergroup_{count}".format(
1157 prefix=self._test_name,
1162 prefix=self._test_name,
1158 count=len(self.user_group_ids))
1163 count=len(self.user_group_ids))
1159 user_group = self.fixture.create_user_group(
1164 user_group = self.fixture.create_user_group(
1160 group_name, cur_user=owner, **kwargs)
1165 group_name, cur_user=owner, **kwargs)
1161
1166
1162 if auto_cleanup:
1167 if auto_cleanup:
1163 self.user_group_ids.append(user_group.users_group_id)
1168 self.user_group_ids.append(user_group.users_group_id)
1164 if members:
1169 if members:
1165 for user in members:
1170 for user in members:
1166 UserGroupModel().add_user_to_group(user_group, user)
1171 UserGroupModel().add_user_to_group(user_group, user)
1167 return user_group
1172 return user_group
1168
1173
1169 def grant_user_permission(self, user_name, permission_name):
1174 def grant_user_permission(self, user_name, permission_name):
1170 self.inherit_default_user_permissions(user_name, False)
1175 self.inherit_default_user_permissions(user_name, False)
1171 self.user_permissions.append((user_name, permission_name))
1176 self.user_permissions.append((user_name, permission_name))
1172
1177
1173 def grant_user_permission_to_repo_group(
1178 def grant_user_permission_to_repo_group(
1174 self, repo_group, user, permission_name):
1179 self, repo_group, user, permission_name):
1175 permission = RepoGroupModel().grant_user_permission(
1180 permission = RepoGroupModel().grant_user_permission(
1176 repo_group, user, permission_name)
1181 repo_group, user, permission_name)
1177 self.user_repo_group_permission_ids.append(
1182 self.user_repo_group_permission_ids.append(
1178 (repo_group.group_id, user.user_id))
1183 (repo_group.group_id, user.user_id))
1179 return permission
1184 return permission
1180
1185
1181 def grant_user_group_permission_to_repo_group(
1186 def grant_user_group_permission_to_repo_group(
1182 self, repo_group, user_group, permission_name):
1187 self, repo_group, user_group, permission_name):
1183 permission = RepoGroupModel().grant_user_group_permission(
1188 permission = RepoGroupModel().grant_user_group_permission(
1184 repo_group, user_group, permission_name)
1189 repo_group, user_group, permission_name)
1185 self.user_group_repo_group_permission_ids.append(
1190 self.user_group_repo_group_permission_ids.append(
1186 (repo_group.group_id, user_group.users_group_id))
1191 (repo_group.group_id, user_group.users_group_id))
1187 return permission
1192 return permission
1188
1193
1189 def grant_user_permission_to_repo(
1194 def grant_user_permission_to_repo(
1190 self, repo, user, permission_name):
1195 self, repo, user, permission_name):
1191 permission = RepoModel().grant_user_permission(
1196 permission = RepoModel().grant_user_permission(
1192 repo, user, permission_name)
1197 repo, user, permission_name)
1193 self.user_repo_permission_ids.append(
1198 self.user_repo_permission_ids.append(
1194 (repo.repo_id, user.user_id))
1199 (repo.repo_id, user.user_id))
1195 return permission
1200 return permission
1196
1201
1197 def grant_user_group_permission_to_repo(
1202 def grant_user_group_permission_to_repo(
1198 self, repo, user_group, permission_name):
1203 self, repo, user_group, permission_name):
1199 permission = RepoModel().grant_user_group_permission(
1204 permission = RepoModel().grant_user_group_permission(
1200 repo, user_group, permission_name)
1205 repo, user_group, permission_name)
1201 self.user_group_repo_permission_ids.append(
1206 self.user_group_repo_permission_ids.append(
1202 (repo.repo_id, user_group.users_group_id))
1207 (repo.repo_id, user_group.users_group_id))
1203 return permission
1208 return permission
1204
1209
1205 def grant_user_permission_to_user_group(
1210 def grant_user_permission_to_user_group(
1206 self, target_user_group, user, permission_name):
1211 self, target_user_group, user, permission_name):
1207 permission = UserGroupModel().grant_user_permission(
1212 permission = UserGroupModel().grant_user_permission(
1208 target_user_group, user, permission_name)
1213 target_user_group, user, permission_name)
1209 self.user_user_group_permission_ids.append(
1214 self.user_user_group_permission_ids.append(
1210 (target_user_group.users_group_id, user.user_id))
1215 (target_user_group.users_group_id, user.user_id))
1211 return permission
1216 return permission
1212
1217
1213 def grant_user_group_permission_to_user_group(
1218 def grant_user_group_permission_to_user_group(
1214 self, target_user_group, user_group, permission_name):
1219 self, target_user_group, user_group, permission_name):
1215 permission = UserGroupModel().grant_user_group_permission(
1220 permission = UserGroupModel().grant_user_group_permission(
1216 target_user_group, user_group, permission_name)
1221 target_user_group, user_group, permission_name)
1217 self.user_group_user_group_permission_ids.append(
1222 self.user_group_user_group_permission_ids.append(
1218 (target_user_group.users_group_id, user_group.users_group_id))
1223 (target_user_group.users_group_id, user_group.users_group_id))
1219 return permission
1224 return permission
1220
1225
1221 def revoke_user_permission(self, user_name, permission_name):
1226 def revoke_user_permission(self, user_name, permission_name):
1222 self.inherit_default_user_permissions(user_name, True)
1227 self.inherit_default_user_permissions(user_name, True)
1223 UserModel().revoke_perm(user_name, permission_name)
1228 UserModel().revoke_perm(user_name, permission_name)
1224
1229
1225 def inherit_default_user_permissions(self, user_name, value):
1230 def inherit_default_user_permissions(self, user_name, value):
1226 user = UserModel().get_by_username(user_name)
1231 user = UserModel().get_by_username(user_name)
1227 user.inherit_default_permissions = value
1232 user.inherit_default_permissions = value
1228 Session().add(user)
1233 Session().add(user)
1229 Session().commit()
1234 Session().commit()
1230
1235
1231 def cleanup(self):
1236 def cleanup(self):
1232 self._cleanup_permissions()
1237 self._cleanup_permissions()
1233 self._cleanup_repos()
1238 self._cleanup_repos()
1234 self._cleanup_repo_groups()
1239 self._cleanup_repo_groups()
1235 self._cleanup_user_groups()
1240 self._cleanup_user_groups()
1236 self._cleanup_users()
1241 self._cleanup_users()
1237
1242
1238 def _cleanup_permissions(self):
1243 def _cleanup_permissions(self):
1239 if self.user_permissions:
1244 if self.user_permissions:
1240 for user_name, permission_name in self.user_permissions:
1245 for user_name, permission_name in self.user_permissions:
1241 self.revoke_user_permission(user_name, permission_name)
1246 self.revoke_user_permission(user_name, permission_name)
1242
1247
1243 for permission in self.user_repo_permission_ids:
1248 for permission in self.user_repo_permission_ids:
1244 RepoModel().revoke_user_permission(*permission)
1249 RepoModel().revoke_user_permission(*permission)
1245
1250
1246 for permission in self.user_group_repo_permission_ids:
1251 for permission in self.user_group_repo_permission_ids:
1247 RepoModel().revoke_user_group_permission(*permission)
1252 RepoModel().revoke_user_group_permission(*permission)
1248
1253
1249 for permission in self.user_repo_group_permission_ids:
1254 for permission in self.user_repo_group_permission_ids:
1250 RepoGroupModel().revoke_user_permission(*permission)
1255 RepoGroupModel().revoke_user_permission(*permission)
1251
1256
1252 for permission in self.user_group_repo_group_permission_ids:
1257 for permission in self.user_group_repo_group_permission_ids:
1253 RepoGroupModel().revoke_user_group_permission(*permission)
1258 RepoGroupModel().revoke_user_group_permission(*permission)
1254
1259
1255 for permission in self.user_user_group_permission_ids:
1260 for permission in self.user_user_group_permission_ids:
1256 UserGroupModel().revoke_user_permission(*permission)
1261 UserGroupModel().revoke_user_permission(*permission)
1257
1262
1258 for permission in self.user_group_user_group_permission_ids:
1263 for permission in self.user_group_user_group_permission_ids:
1259 UserGroupModel().revoke_user_group_permission(*permission)
1264 UserGroupModel().revoke_user_group_permission(*permission)
1260
1265
1261 def _cleanup_repo_groups(self):
    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)

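# The two comparators above order groups deepest-path-first so that child
# groups are destroyed before their parents. Note that `cmp` is not a Python 3
# builtin and is assumed to be provided elsewhere in this module; the
# self-contained sketch below inlines the comparison and uses made-up names.
import functools


def _depth_compare_example(first_name, second_name):
    # positive when the second name has more '/' separated parts,
    # so deeper paths sort first
    first_parts = len(first_name.split('/'))
    second_parts = len(second_name.split('/'))
    return (second_parts > first_parts) - (second_parts < first_parts)


assert sorted(
    ['parent', 'parent/child/grandchild', 'parent/child'],
    key=functools.cmp_to_key(_depth_compare_example)) == [
        'parent/child/grandchild', 'parent/child', 'parent']
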
@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')

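# A usage sketch for the client above. The URL, API key, and metric values are
# placeholders, and send_stats() performs a real HTTP POST, so this assumes a
# reachable AppEnlight endpoint; `testrun` is the dict produced by the session
# fixture of the same name.
def _appenlight_usage_sketch(testrun):
    client = AppenlightClient(
        url='https://appenlight.example.com/api/general_metrics',  # placeholder
        api_key='PLACEHOLDER-API-KEY',  # placeholder
        namespace='test_example.py::test_something',
        request='test-run-example',
        testrun=testrun,
    )
    client.tag_before('memory_rss', 100)
    client.tag_after('memory_rss', 120)  # send_stats() also reports the delta of 20
    client.send_stats()
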
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))

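# A hypothetical test consuming the fixture above. Keyword arguments are passed
# straight through to Fixture.create_gist(), and every created gist is removed
# again by the finalizer, so the test needs no cleanup of its own.
def test_created_gist_is_tracked(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_id is not None
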
@pytest.fixture()
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or sha1_safe(f'{section}{value}{repo.repo_id}')

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or sha1_safe(f'{section}{value}')

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()

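# A sketch of how a test might use the utility. `repo_stub` is assumed to be a
# repository fixture available elsewhere in the suite, and the hook section and
# value shown here are purely illustrative; the created row is deleted again in
# settings_util.cleanup(), wired via request.addfinalizer.
def test_repo_level_ui_setting(settings_util, repo_stub):
    setting = settings_util.create_repo_rhodecode_ui(
        repo_stub, section='hooks', value='python:example_hooks.post_push')
    assert setting.ui_section == 'hooks'
    assert setting.ui_active
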
@pytest.fixture()
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')

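# A hypothetical consumer of the `repeat` fixture, following the pattern the
# docstring describes: a slower operation scales the count down so the default
# run stays quick while a larger --repeat still exercises the hot path.
def test_int_parsing_speed(repeat):
    for _ in range(max(1, repeat // 10)):
        assert int('42') == 42
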
@pytest.fixture()
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    context = pyramid.testing.DummyResource()
    return context


@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    request = bootstrap_request(scheme='https')
    return request


@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config

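# A minimal sketch of a unit test leaning on the testing Configurator. The
# setting name is made up; add_settings() is the standard Pyramid Configurator
# API, and pyramid.testing.tearDown() runs automatically via the finalizer.
def test_with_testing_config(config_stub):
    config_stub.add_settings({'app.example_flag': 'on'})
    assert config_stub.registry.settings['app.example_flag'] == 'on'
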
@pytest.fixture()
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType


@pytest.fixture()
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }

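# The two fixtures above are designed to fit together: the settings dict
# matches the colander schema declared by the stub type. A sketch of a test
# checking exactly that, assuming colander's usual deserialize behaviour.
def test_stub_settings_match_schema(StubIntegrationType, stub_integration_settings):
    integration_type = StubIntegrationType(settings={})
    schema = integration_type.settings_schema()
    assert schema.deserialize(stub_integration_settings) == stub_integration_settings
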
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType, stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration

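# The five stubs above differ only in scope (single repo, repo group with or
# without children, global, root repos only). A hypothetical test can assert on
# that scope directly, assuming the Integration model exposes `repo` and
# `repo_group` relationships as it does elsewhere in the codebase.
def test_integration_scopes(repo_integration_stub, global_integration_stub):
    assert repo_integration_stub.repo is not None     # bound to one repository
    assert global_integration_stub.repo is None       # applies everywhere
    assert global_integration_stub.repo_group is None
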
@pytest.fixture()
def local_dt_to_utc():
    def _factory(dt):
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory

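# A sketch of the factory in use: the result stays a naive datetime and the
# exact shift depends on the timezone of the machine running the tests.
def test_local_dt_to_utc_returns_naive_datetime(local_dt_to_utc):
    import datetime  # repeated here so the sketch stays self-contained
    as_utc = local_dt_to_utc(datetime.datetime(2023, 6, 1, 12, 0, 0))
    assert as_utc.tzinfo is None
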
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    set_anonymous_access(False)

    @request.addfinalizer
    def cleanup():
        set_anonymous_access(True)

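# A hypothetical functional test relying on the fixture; the `app` fixture
# name, the URL, and the redirect target are assumptions about the wider suite.
def test_summary_requires_login(app, disable_anonymous_user):
    response = app.get('/stub-repo', status=302)  # anonymous access is switched off
    assert 'login' in response.headers['Location']
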
@pytest.fixture(scope='module')
def rc_fixture(request):
    return Fixture()


@pytest.fixture()
def repo_groups(request):
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
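
# A hypothetical consumer of the fixture above; the three groups are destroyed
# again by the finalizer regardless of the test outcome.
def test_child_group_is_nested_under_parent(repo_groups):
    zombie_group, parent_group, child_group = repo_groups
    assert child_group.group_parent_id == parent_group.group_id
    assert zombie_group.group_parent_id is None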