##// END OF EJS Templates
testing: added webhook tests and fixed adding of tags
super-admin -
r4879:e11d95c4 default
parent child Browse files
Show More
@@ -0,0 +1,80 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Test suite for making push/pull operations, on specially modified INI files
23
24 .. important::
25
26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 to redirect things to stderr instead of stdout.
28 """
29
30 import pytest
31 import requests
32
33 from rhodecode.tests import GIT_REPO, HG_REPO
34 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
35
36
def check_connection():
    """Return ``True`` when outside internet connectivity is available.

    Performs a quick GET against httpbin.org. Any failure (DNS error,
    refused connection, timeout, non-200 status) is reported as "no
    connection" so that importing this module never crashes test
    collection; the exception is printed for diagnostics only.
    """
    try:
        # A timeout is essential here: without it a half-up network
        # (e.g. captive portal) could hang test collection forever.
        response = requests.get('http://httpbin.org', timeout=5)
        return response.status_code == 200
    except Exception as e:
        # deliberately broad: any network problem just means "skip the
        # connection-dependent tests", never an import-time error
        print(e)

    return False
45
46
# Evaluated once at import time: marker that skips tests which need to
# reach external services when no internet connection is available.
_has_connection = check_connection()
connection_available = pytest.mark.skipif(
    not _has_connection,
    reason="No outside internet connection available")
49
50
@pytest.mark.usefixtures("baseapp", "enable_webhook_push_integration")
class TestVCSOperationsOnCustomIniConfig(object):
    """Push over hg/git and assert the webhook integration fired.

    The ``enable_webhook_push_integration`` fixture wires a webhook
    integration to repo-push events; after pushing we inspect the test
    server log for the celery task and the event dispatch.
    """

    def _assert_webhook_fired(self, rc_web_server):
        # Shared log assertions: no errors, the post_to_webhook task ran,
        # and the repo-push event was routed to the webhook integration.
        rc_log = rc_web_server.get_rc_log()
        assert 'ERROR' not in rc_log
        assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log
        assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log

    def test_push_with_webhook_hg(self, rc_web_server, tmpdir):
        clone_url = rc_web_server.repo_clone_url(HG_REPO)

        stdout, stderr = Command('/tmp').execute(
            'hg clone', clone_url, tmpdir.strpath)

        # push back to the same URL we cloned from
        _add_files_and_push('hg', tmpdir.strpath, clone_url=clone_url)
        self._assert_webhook_fired(rc_web_server)

    def test_push_with_webhook_git(self, rc_web_server, tmpdir):
        # NOTE: renamed from the original typo `test_push_with_webhook_gut`
        clone_url = rc_web_server.repo_clone_url(GIT_REPO)

        stdout, stderr = Command('/tmp').execute(
            'git clone', clone_url, tmpdir.strpath)

        # push back to the same URL we cloned from
        _add_files_and_push('git', tmpdir.strpath, clone_url=clone_url)
        self._assert_webhook_fired(rc_web_server)
@@ -1,1092 +1,1092 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib.compat import OrderedDict
30 30 from rhodecode.lib.ext_json import json
31 31 from rhodecode.lib.vcs import nodes
32 32
33 33 from rhodecode.lib.vcs.conf import settings
34 34 from rhodecode.tests import assert_session_flash
35 35 from rhodecode.tests.fixture import Fixture
36 36 from rhodecode.model.db import Session
37 37
38 38 fixture = Fixture()
39 39
40 40
def get_node_history(backend_type):
    """Return the canned node-history JSON for *backend_type*.

    All three backend fixtures are loaded eagerly (matching the original
    behavior) and the one matching ``backend_type`` is returned.
    """
    responses = {
        alias: json.loads(
            fixture.load_resource('%s_node_history_response.json' % alias))
        for alias in ('hg', 'git', 'svn')
    }
    return responses[backend_type]
47 47
48 48
def route_path(name, params=None, **kwargs):
    """Resolve a named repo-files route to a concrete URL.

    ``kwargs`` fill the ``{placeholder}`` fields of the route template;
    ``params``, when given, is appended as a query string.
    """
    import urllib

    templates = {
        'repo_summary': '/{repo_name}',
        'repo_archivefile': '/{repo_name}/archive/{fname}',
        'repo_files_diff': '/{repo_name}/diff/{f_path}',
        'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
        'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
        'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
        'repo_files:default_commit': '/{repo_name}/files',
        'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
        'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
        'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
        'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
        'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
        'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
        'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
        'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
        'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
        'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
        'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
        'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
        'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
        'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
        'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
        'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
    }

    url = templates[name].format(**kwargs)
    if params:
        url = '%s?%s' % (url, urllib.urlencode(params))
    return url
81 81
82 82
def assert_files_in_response(response, files, params):
    """Require a rendered file link for every entry in *files*."""
    link_tpl = 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"'
    _assert_items_in_response(response, files, link_tpl, params)
87 87
88 88
def assert_dirs_in_response(response, dirs, params):
    """Require a rendered directory link for every entry in *dirs*."""
    link_tpl = 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"'
    _assert_items_in_response(response, dirs, link_tpl, params)
93 93
94 94
95 95 def _assert_items_in_response(response, items, template, params):
96 96 for item in items:
97 97 item_params = {'name': item}
98 98 item_params.update(params)
99 99 response.mustcontain(template % item_params)
100 100
101 101
def assert_timeago_in_response(response, items, params):
    """Require the rendered age component once per listed item.

    The markup depends only on ``params['date']``, so the same age
    component is asserted for every item (the item itself is unused).
    """
    for _unused in items:
        response.mustcontain(h.age_component(params['date']))
105 105
106 106
@pytest.mark.usefixtures("app")
class TestFilesViews(object):
    """Functional tests of the repository file-browser views.

    The ``backend`` fixture parametrizes every test over the hg/git/svn
    pre-generated test repositories; the hard-coded ``commit_idx`` values
    refer to commits in those fixtures. XHR-only views are exercised via
    the ``xhr_header`` fixture and must 404 without it.
    """

    def test_show_files(self, backend):
        """Root tree at tip lists the expected dirs, files and timeago."""
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))
        commit = backend.repo.get_commit()

        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
            'date': commit.date
        }
        assert_dirs_in_response(response, ['docs', 'vcs'], params)
        files = [
            '.gitignore',
            '.hgignore',
            '.hgtags',
            # TODO: missing in Git
            # '.travis.yml',
            'MANIFEST.in',
            'README.rst',
            # TODO: File is missing in svn repository
            # 'run_test_and_report.sh',
            'setup.cfg',
            'setup.py',
            'test_and_report.sh',
            'tox.ini',
        ]
        assert_files_in_response(response, files, params)
        assert_timeago_in_response(response, files, params)

    def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
        # hg-only: the 'subrepos' fixture contains a submodule pointing at
        # an absolute URL which must be rendered as exactly one link
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'absolute-path @ 000000000000', 'http://example.com/absolute-path')

    def test_show_files_links_submodules_with_absolute_url_subpaths(
            self, backend_hg):
        repo = backend_hg['subrepos']
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))
        assert_response = response.assert_response()
        assert_response.contains_one_link(
            'subpaths-path @ 000000000000',
            'http://sub-base.example.com/subpaths-path')

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_files_menu(self, backend):
        """Files menu links to the landing ref, not to tip."""
        new_branch = "temp_branch_name"
        commits = [
            {'message': 'a'},
            {'message': 'b', 'branch': new_branch}
        ]
        backend.create_repo(commits)
        backend.repo.landing_rev = "branch:%s" % new_branch
        Session().commit()

        # get response based on tip and not new commit
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'))

        # make sure Files menu url is not tip but new commit
        landing_rev = backend.repo.landing_ref_name
        files_url = route_path('repo_files:default_path',
                               repo_name=backend.repo_name,
                               commit_id=landing_rev, params={'at': landing_rev})

        assert landing_rev != 'tip'
        response.mustcontain(
            '<li class="active"><a class="menulink" href="%s">' % files_url)

    def test_show_files_commit(self, backend):
        commit = backend.repo.get_commit(commit_idx=32)

        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        dirs = ['docs', 'tests']
        files = ['README.rst']
        params = {
            'repo_name': backend.repo_name,
            'commit_id': commit.raw_id,
        }
        assert_dirs_in_response(response, dirs, params)
        assert_files_in_response(response, files, params)

    def test_show_files_different_branch(self, backend):
        """Branch tags are rendered for commits on non-default branches."""
        branches = dict(
            hg=(150, ['git']),
            # TODO: Git test repository does not contain other branches
            git=(633, ['master']),
            # TODO: Branch support in Subversion
            svn=(150, [])
        )
        idx, branches = branches[backend.alias]
        commit = backend.repo.get_commit(commit_idx=idx)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'))

        assert_response = response.assert_response()
        for branch in branches:
            assert_response.element_contains('.tags .branchtag', branch)

    def test_show_files_paging(self, backend):
        """Every visited commit page shows its own rN:shorthash label."""
        repo = backend.repo
        indexes = [73, 92, 109, 1, 0]
        idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
                   for rev in indexes]

        for idx in idx_map:
            response = self.app.get(
                route_path('repo_files',
                           repo_name=backend.repo_name,
                           commit_id=idx[1], f_path='/'))

            response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))

    def test_file_source(self, backend):
        """File view shows the commit message plus branch/tag markers."""
        commit = backend.repo.get_commit(commit_idx=167)
        response = self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'))

        msgbox = """<div class="commit">%s</div>"""
        response.mustcontain(msgbox % (commit.message, ))

        assert_response = response.assert_response()
        if commit.branch:
            assert_response.element_contains(
                '.tags.tags-main .branchtag', commit.branch)
        if commit.tags:
            for tag in commit.tags:
                assert_response.element_contains('.tags.tags-main .tagtag', tag)

    def test_file_source_annotated(self, backend):
        response = self.app.get(
            route_path('repo_files:annotated',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        # per-backend revision label expected in the annotation gutter
        expected_commits = {
            'hg': 'r356',
            'git': 'r345',
            'svn': 'r208',
        }
        response.mustcontain(expected_commits[backend.alias])

    def test_file_source_authors(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_authors_with_annotation(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_history(self, backend, xhr_header):
        """History endpoint returns the canned JSON fixture for the backend."""
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_file_source_history_svn(self, backend_svn, xhr_header):
        simple_repo = backend_svn['svn-simple-layout']
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=simple_repo.repo_name,
                       commit_id='tip', f_path='trunk/example.py'),
            extra_environ=xhr_header)

        expected_data = json.loads(
            fixture.load_resource('svn_node_history_branches.json'))

        assert expected_data == response.json

    def test_file_source_history_with_annotation(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)),

            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_tree_search_top_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']

    def test_tree_search_missing_xhr(self, backend):
        # nodelist is XHR-only: plain GET must 404
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)

    def test_tree_search_at_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api', 'type': 'dir'} in nodes
        assert {'name': 'docs/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs/api'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/docs'),
            status=404)

    def test_nodetree(self, backend, xhr_header):
        """Full node tree carries commit/date/author data attributes."""
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)

        assert_response = response.assert_response()

        for attr in ['data-commit-id', 'data-date', 'data-author']:
            elements = assert_response.get_elements('[{}]'.format(attr))
            assert len(elements) > 1

            for element in elements:
                assert element.get(attr)

    def test_nodetree_if_file(self, backend, xhr_header):
        # nodetree of a file path renders nothing (empty body)
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='README.rst'),
            extra_environ=xhr_header)
        assert response.body == ''

    def test_nodetree_wrong_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/dont-exist'),
            extra_environ=xhr_header)

        err = 'error: There is no file nor ' \
              'directory at the given path'
        assert err in response.body

    def test_nodetree_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)
421 421
422 422
@pytest.mark.usefixtures("app", "autologin_user")
class TestRawFileHandling(object):
    """Tests for raw-view and download endpoints of repository files.

    Covers content-type/content-disposition headers and the 404 paths for
    unknown commits and unknown file paths.
    """

    def test_download_file(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        # download must force attachment disposition with RFC 5987 filename*
        assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
        assert response.content_type == "text/x-python"

    def test_download_file_wrong_cs(self, backend):
        raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_download_file_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_file_raw(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_type == "text/plain"

    def test_file_raw_binary(self, backend):
        commit = backend.repo.get_commit()
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id,
                       f_path='docs/theme/ADC/static/breadcrumb_background.png'),)

        assert response.content_disposition == 'inline'

    def test_raw_file_wrong_cs(self, backend):
        raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_raw_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_raw_svg_should_not_be_rendered(self, backend):
        backend.create_repo()
        backend.ensure_file("xss.svg")
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='xss.svg'),)
        # If the content type is image/svg+xml then it allows to render HTML
        # and malicious SVG.
        assert response.content_type == "text/plain"
518 518
519 519
@pytest.mark.usefixtures("app")
class TestRepositoryArchival(object):
    """Tests of the repository archive-download endpoint.

    Iterates over every archive spec (tar.gz, zip, ...) declared in
    ``settings.ARCHIVE_SPECS`` and checks headers, plus the error paths
    for unknown extensions and commit ids.
    """

    def test_archival(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = commit.short_id + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    def test_archival_no_hash(self, backend):
        # with_hash=0 requests a 'plain' filename without the commit hash
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = 'plain' + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname, params={'with_hash': 0}))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    @pytest.mark.parametrize('arch_ext',[
        'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
    def test_archival_wrong_ext(self, backend, arch_ext):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)

        fname = commit.raw_id + '.' + arch_ext

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain(
            'Unknown archive type for: `{}`'.format(fname))

    @pytest.mark.parametrize('commit_id', [
        '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
    def test_archival_wrong_commit_id(self, backend, commit_id):
        backend.enable_downloads()
        fname = '%s.zip' % commit_id

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain('Unknown commit_id')
593 593
594 594
@pytest.mark.usefixtures("app")
class TestFilesDiff(object):
    """Tests for the legacy file-diff views.

    The old diff views redirect to the compare page; most tests follow
    that redirect and assert on the compare-page content. Three svn
    cross-path tests are disabled with an early ``return`` (see TODOs).
    """

    @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
    def test_file_full_diff(self, backend, diff):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
                'fulldiff': '1',
                'diff': diff,
            })

        if diff == 'diff':
            # use redirect since this is OLD view redirecting to compare page
            response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_file_binary_diff(self, backend):
        commits = [
            {'message': 'First commit'},
            {'message': 'Commit with binary',
             'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
        ]
        repo = backend.create_repo(commits=commits)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='file.bin'),
            params={
                'diff1': repo.get_commit(commit_idx=0).raw_id,
                'diff2': repo.get_commit(commit_idx=1).raw_id,
                'fulldiff': '1',
                'diff': 'diff',
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()
        response.mustcontain('Collapse 1 commit')
        file_changes = (1, 0, 0)

        compare_page = ComparePage(response)
        compare_page.contains_change_summary(*file_changes)

        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            # TODO(marcink): SVN doesn't yet detect binary changes
        else:
            response.mustcontain('new file 100644')
            response.mustcontain('binary diff hidden')

    def test_diff_2way(self, backend):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_requires_one_commit_id(self, backend, autologin_user):
        # missing diff1/diff2 query parameters must produce a 400
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README.rst'),
            status=400)
        response.mustcontain(
            'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')

    def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
        repo = vcsbackend.repo
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=repo.name,
                       f_path='does-not-exist-in-any-commit'),
            params={
                'diff1': repo[0].raw_id,
                'diff2': repo[1].raw_id
            })

        response = response.follow()
        response.mustcontain('No files')

    def test_returns_redirect_if_file_not_changed(self, backend):
        commit = backend.repo.get_commit(commit_idx=-1)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit.raw_id,
                'diff2': commit.raw_id,
            })

        response = response.follow()
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

    def test_supports_diff_to_different_path_svn(self, backend_svn):
        #TODO: check this case
        # NOTE(review): disabled — everything below the return is dead code
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id_1 = '24'
        commit_id_2 = '26'

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'tags/v0.2/example.py@' + commit_id_1,
                'diff2': commit_id_2,
            })

        response = response.follow()
        response.mustcontain(
            # diff contains this
            "Will print out a useful message on invocation.")

        # Note: Expecting that we indicate the user what's being compared
        response.mustcontain("trunk/example.py")
        response.mustcontain("tags/v0.2/example.py")

    def test_show_rev_redirects_to_svn_path(self, backend_svn):
        #TODO: check this case
        # NOTE(review): disabled — everything below the return is dead code
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/files/26/branches/argparse/example.py')

    def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
        #TODO: check this case
        # NOTE(review): disabled — everything below the return is dead code
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
                'show_rev': 'Show at Revision',
                'annotate': 'true',
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
777 777
778 778
@pytest.mark.usefixtures("app", "autologin_user")
class TestModifyFilesWithWebInterface(object):
    """Tests for adding, editing and deleting repository files via the web UI."""

    def test_add_file_view(self, backend):
        # the "add file" form should render without errors
        self.app.get(
            route_path('repo_files_add_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/')
            )

    @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
    def test_add_file_into_repo_missing_content(self, backend, csrf_token):
        # an empty content is still a valid commit of a new file
        backend.create_repo()
        filename = 'init.py'
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)
        # NOTE(review): os.path.join() with a single argument is a no-op here
        expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename))
        assert_session_flash(response, expected_msg)

    def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
        # posting content without a filename must be rejected with a flash
        commit_id = backend.repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

    def test_add_file_into_repo_errors_and_no_commits(
            self, backend, csrf_token):
        repo = backend.create_repo()
        # Create a file with no filename, it will display an error but
        # the repo has no commits yet
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

        # Not allowed, redirect to the summary
        redirected = response.follow()
        summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)

        # As there are no commits, displays the summary page with the error of
        # creating a file with no filename

        assert redirected.request.path == summary_url

    @pytest.mark.parametrize("filename, clean_filename", [
        ('/abs/foo', 'abs/foo'),
        ('../rel/foo', 'rel/foo'),
        ('file/../foo/foo', 'file/foo/foo'),
    ])
    def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
        # unsafe path components are sanitized before the commit is created
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id

        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
        assert_session_flash(response, expected_msg)

    @pytest.mark.parametrize("cnt, filename, content", [
        (1, 'foo.txt', "Content"),
        (2, 'dir/foo.rst', "Content"),
        (3, 'dir/foo-second.rst', "Content"),
        (4, 'rel/dir/foo.bar', "Content"),
    ])
    def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
        # flat and nested paths can be committed into a fresh repository
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': content,
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(filename)
        assert_session_flash(response, expected_msg)

    def test_edit_file_view(self, backend):
        # the editor view renders the current file content
        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)
        response.mustcontain("Module holding everything related to vcs nodes.")

    def test_edit_file_view_not_on_branch(self, backend):
        # editing requires the commit to be the head of a branch
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_edit_file_view_commit_changes(self, backend, csrf_token):
        # posting an edit with an explicit message creates a commit with it
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': 'I committed',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'I committed'

    def test_edit_file_view_commit_changes_default_message(self, backend,
                                                           csrf_token):
        # an empty message falls back to the auto-generated default message
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        commit_id = (
            backend.default_branch_name or
            backend.repo.scm_instance().commit_ids[-1])

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': '',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'

    def test_delete_file_view(self, backend):
        # the delete confirmation view renders for a branch head
        self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)

    def test_delete_file_view_not_on_branch(self, backend):
        # deleting requires the commit to be the head of a branch
        repo = backend.create_repo()
        backend.ensure_file('vcs/nodes.py')

        response = self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_delete_file_view_commit_changes(self, backend, csrf_token):
        # posting the delete form creates a deletion commit
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.post(
            route_path('repo_files_delete_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'message': 'i committed',
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully deleted file `vcs/nodes.py`')
999 999
@pytest.mark.usefixtures("app")
class TestFilesViewOtherCases(object):
    """Edge cases of the files view: empty repos and route-like file names."""

    def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
            self, backend_stub, autologin_regular_user, user_regular,
            user_util):
        # with write permission, the empty-repo alert offers an "add file" link
        repo = backend_stub.create_repo()
        user_util.grant_user_permission_to_repo(
            repo, user_regular, 'repository.write')
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        assert_session_flash(
            response,
            'There are no files yet. <a class="alert-link" '
            'href="{}">Click here to add a new file.</a>'
            .format(repo_file_add_url))

    def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
            self, backend_stub, autologin_regular_user):
        repo = backend_stub.create_repo()
        # init session for anon user
        route_path('repo_summary', repo_name=repo.repo_name)

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        # without write permission the "add file" link must NOT be offered
        assert_session_flash(response, no_=repo_file_add_url)

    @pytest.mark.parametrize('file_node', [
        'archive/file.zip',
        'diff/my-file.txt',
        'render.py',
        'render',
        'remove_file',
        'remove_file/to-delete.txt',
    ])
    def test_file_names_equal_to_routes_parts(self, backend, file_node):
        # file paths that collide with sub-route names must still resolve
        backend.create_repo()
        backend.ensure_file(file_node)

        self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path=file_node),
            status=200)
1062 1062
class TestAdjustFilePathForSvn(object):
    """Unit tests for the SVN-specific path adjustment in RepoFilesView."""

    def test_returns_path_relative_to_matched_reference(self):
        mock_repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('trunk/file', 'file', mock_repo)

    def test_does_not_modify_file_if_no_reference_matches(self):
        mock_repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('notes/file', 'notes/file', mock_repo)

    def test_does_not_adjust_partial_directory_names(self):
        mock_repo = self._repo(branches=['trun'])
        self.assert_file_adjustment('trunk/file', 'trunk/file', mock_repo)

    def test_is_robust_to_patterns_which_prefix_other_patterns(self):
        mock_repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
        self.assert_file_adjustment('trunk/new/file', 'file', mock_repo)

    def assert_file_adjustment(self, f_path, expected, repo):
        # helper: run the adjustment and compare against the expectation
        adjusted = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
        assert adjusted == expected

    def _repo(self, branches=None):
        # helper: a minimal repo double exposing only `branches` and `tags`
        repo = mock.Mock()
        repo.branches = OrderedDict.fromkeys(branches or [], '0')
        repo.tags = {}
        return repo
@@ -1,1028 +1,1028 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 or_, false,
51 51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 52 PullRequest, FileStore)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
class UserTemp(object):
    """Minimal stand-in object carrying only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
65 65
66 66
class RepoTemp(object):
    """Minimal stand-in object carrying only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
73 73
74 74
class SimpleCachedRepoList(object):
    """
    Lighter version of of iteration of repos without the scm initialisation,
    and with cache usage.  Yields plain dicts, filtering out repositories
    the current context has no read permission for.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key signals descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission filtering happens lazily, at iteration time
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {}
            }
110 110
111 111
class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        :param extra_kwargs: extra keyword args forwarded to the checker
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        # instantiate the checker once with the full permission set
        self.perm_checker = perm_checker(*self.perm_set)
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<{} ({})>'.format(self.__class__.__name__, len(self))

    def __iter__(self):
        checker = self.perm_checker
        attr = self.obj_attr
        for db_obj in self.obj_list:
            # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
            name = db_obj.__dict__.get(attr, None)
            if checker(name, self.__class__.__name__, **self.extra_kwargs):
                yield db_obj
147 147
148 148
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any level of repository access
        default_perms = [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='_repo_name',
            perm_set=perm_set or default_perms,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160 160
161 161
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over repository-group db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any level of group access
        default_perms = ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='_group_name',
            perm_set=perm_set or default_perms,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173 173
174 174
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over user-group db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any level of user-group access
        default_perms = [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or default_perms,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database

        Resolved lazily and cached on first access (LazyProperty).
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
201 201
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects
        keyed by normalized repository name.

        Unreadable paths and duplicate names are logged/skipped, not raised.

        :param repos_path: path to directory containing repositories;
            defaults to the configured repositories root
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    # caught by the RepositoryError handler below -> skipped
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    backend = get_backend(path[0])
                    repos[name] = backend(path[1], config=config,
                                          with_wire={"cache": False})
            except OSError:
                # unreadable or vanished directory -- skip silently
                continue
            except RepositoryError:
                log.exception('Failed to create a repo')
                continue

        log.debug('found %s paths with repositories', len(repos))
        return repos
240 240
    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        :returns: SimpleCachedRepoList over the selected repositories
        """
        if all_repos is None:
            # default: top-level repositories only (no parent group),
            # case-insensitively sorted by name
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter
258 258
259 259 def get_repo_groups(self, all_groups=None):
260 260 if all_groups is None:
261 261 all_groups = RepoGroup.query()\
262 262 .filter(RepoGroup.group_parent_id == None).all()
263 263 return [x for x in RepoGroupList(all_groups)]
264 264
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        # silently no-op for unknown repository names
        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the cached commit data right away so readers see it
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace(
                    'cache_repo', cache_namespace_uid, invalidate=True)
290 290
291 291 def toggle_following_repo(self, follow_repo_id, user_id):
292 292
293 293 f = self.sa.query(UserFollowing)\
294 294 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
295 295 .filter(UserFollowing.user_id == user_id).scalar()
296 296
297 297 if f is not None:
298 298 try:
299 299 self.sa.delete(f)
300 300 return
301 301 except Exception:
302 302 log.error(traceback.format_exc())
303 303 raise
304 304
305 305 try:
306 306 f = UserFollowing()
307 307 f.user_id = user_id
308 308 f.follows_repo_id = follow_repo_id
309 309 self.sa.add(f)
310 310 except Exception:
311 311 log.error(traceback.format_exc())
312 312 raise
313 313
314 314 def toggle_following_user(self, follow_user_id, user_id):
315 315 f = self.sa.query(UserFollowing)\
316 316 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 317 .filter(UserFollowing.user_id == user_id).scalar()
318 318
319 319 if f is not None:
320 320 try:
321 321 self.sa.delete(f)
322 322 return
323 323 except Exception:
324 324 log.error(traceback.format_exc())
325 325 raise
326 326
327 327 try:
328 328 f = UserFollowing()
329 329 f.user_id = user_id
330 330 f.follows_user_id = follow_user_id
331 331 self.sa.add(f)
332 332 except Exception:
333 333 log.error(traceback.format_exc())
334 334 raise
335 335
336 336 def is_following_repo(self, repo_name, user_id, cache=False):
337 337 r = self.sa.query(Repository)\
338 338 .filter(Repository.repo_name == repo_name).scalar()
339 339
340 340 f = self.sa.query(UserFollowing)\
341 341 .filter(UserFollowing.follows_repository == r)\
342 342 .filter(UserFollowing.user_id == user_id).scalar()
343 343
344 344 return f is not None
345 345
346 346 def is_following_user(self, username, user_id, cache=False):
347 347 u = User.get_by_username(username)
348 348
349 349 f = self.sa.query(UserFollowing)\
350 350 .filter(UserFollowing.follows_user == u)\
351 351 .filter(UserFollowing.user_id == user_id).scalar()
352 352
353 353 return f is not None
354 354
355 355 def get_followers(self, repo):
356 356 repo = self._get_repo(repo)
357 357
358 358 return self.sa.query(UserFollowing)\
359 359 .filter(UserFollowing.follows_repository == repo).count()
360 360
361 361 def get_forks(self, repo):
362 362 repo = self._get_repo(repo)
363 363 return self.sa.query(Repository)\
364 364 .filter(Repository.fork == repo).count()
365 365
366 366 def get_pull_requests(self, repo):
367 367 repo = self._get_repo(repo)
368 368 return self.sa.query(PullRequest)\
369 369 .filter(PullRequest.target_repo == repo)\
370 370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371 371
372 372 def get_artifacts(self, repo):
373 373 repo = self._get_repo(repo)
374 374 return self.sa.query(FileStore)\
375 375 .filter(FileStore.repo == repo)\
376 376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
377 377
378 378 def mark_as_fork(self, repo, fork, user):
379 379 repo = self._get_repo(repo)
380 380 fork = self._get_repo(fork)
381 381 if fork and repo.repo_id == fork.repo_id:
382 382 raise Exception("Cannot set repository as fork of itself")
383 383
384 384 if fork and repo.repo_type != fork.repo_type:
385 385 raise RepositoryError(
386 386 "Cannot set repository as fork of repository with other type")
387 387
388 388 repo.fork = fork
389 389 self.sa.add(repo)
390 390 return repo
391 391
392 392 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
393 393 dbrepo = self._get_repo(repo)
394 394 remote_uri = remote_uri or dbrepo.clone_uri
395 395 if not remote_uri:
396 396 raise Exception("This repository doesn't have a clone uri")
397 397
398 398 repo = dbrepo.scm_instance(cache=False)
399 399 repo.config.clear_section('hooks')
400 400
401 401 try:
402 402 # NOTE(marcink): add extra validation so we skip invalid urls
403 403 # this is due this tasks can be executed via scheduler without
404 404 # proper validation of remote_uri
405 405 if validate_uri:
406 406 config = make_db_config(clear_session=False)
407 407 url_validator(remote_uri, dbrepo.repo_type, config)
408 408 except InvalidCloneUrl:
409 409 raise
410 410
411 411 repo_name = dbrepo.repo_name
412 412 try:
413 413 # TODO: we need to make sure those operations call proper hooks !
414 414 repo.fetch(remote_uri)
415 415
416 416 self.mark_for_invalidation(repo_name)
417 417 except Exception:
418 418 log.error(traceback.format_exc())
419 419 raise
420 420
421 421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
422 422 dbrepo = self._get_repo(repo)
423 423 remote_uri = remote_uri or dbrepo.push_uri
424 424 if not remote_uri:
425 425 raise Exception("This repository doesn't have a clone uri")
426 426
427 427 repo = dbrepo.scm_instance(cache=False)
428 428 repo.config.clear_section('hooks')
429 429
430 430 try:
431 431 # NOTE(marcink): add extra validation so we skip invalid urls
432 432 # this is due this tasks can be executed via scheduler without
433 433 # proper validation of remote_uri
434 434 if validate_uri:
435 435 config = make_db_config(clear_session=False)
436 436 url_validator(remote_uri, dbrepo.repo_type, config)
437 437 except InvalidCloneUrl:
438 438 raise
439 439
440 440 try:
441 441 repo.push(remote_uri)
442 442 except Exception:
443 443 log.error(traceback.format_exc())
444 444 raise
445 445
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit the change is based on
        :param user: user object/id/name resolvable via `_get_user`
        :param author: commit author, coerced to unicode
        :param message: commit message, coerced to unicode
        :param content: new file content, coerced to str
        :param f_path: path of the file being changed
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the newly created tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip
483 483
484 484 def _sanitize_path(self, f_path):
485 485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
486 486 raise NonRelativePathError('%s is not an relative path' % f_path)
487 487 if f_path:
488 488 f_path = os.path.normpath(f_path)
489 489 return f_path
490 490
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Collect display metadata (size, last-commit info, author gravatar)
        for every file directly inside `dir_node`.

        :param request: current request, used for gravatar rendering
        :param commit: commit the directory node belongs to (unused here)
        :param dir_node: directory node to inspect
        :returns: list of dicts, one per file; empty list for non-dir nodes
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a file (sub-directories etc.)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
517 517
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add nodes content to the return data
        :param max_file_bytes: will not return file contents over this limit

        :returns: tuple of (dirs, files); entries are plain paths when
            `flat` is True, otherwise dicts with the requested details
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):

                for f in files:
                    # NOTE(review): `_content` is assigned but never used
                    _content = None
                    _data = f_name = f.unicode_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # only inline content of readable, small files
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            # directories carry no file-level details
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files
596 596
597 597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 598 """
599 599 Generate files for quick filter in files view
600 600 """
601 601
602 602 _files = list()
603 603 _dirs = list()
604 604 try:
605 605 _repo = self._get_repo(repo_name)
606 606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 607 root_path = root_path.lstrip('/')
608 608 for __, dirs, files in commit.walk(root_path):
609 609
610 610 for f in files:
611 611
612 612 _data = {
613 613 "name": h.escape(f.unicode_path),
614 614 "type": "file",
615 615 }
616 616
617 617 _files.append(_data)
618 618
619 619 for d in dirs:
620 620
621 621 _data = {
622 622 "name": h.escape(d.unicode_path),
623 623 "type": "dir",
624 624 }
625 625
626 626 _dirs.append(_data)
627 627 except RepositoryError:
628 628 log.exception("Exception in get_quick_filter_nodes")
629 629 raise
630 630
631 631 return _dirs, _files
632 632
    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        Retrieve a single file node from the given commit.

        :param repo_name: name of repository
        :param commit_id: commit id to read the node from
        :param file_path: path of the file node inside the commit
        :param extended_info: include extension/mimetype in the result
        :param content: include file content and line count in the result
        :param max_file_bytes: do not return content for files over this size
        :param cache: when True read md5/size/binary from the (cached) node
            properties; otherwise fetch metadata (and content) uncached
        :raises RepositoryError: when the path is a directory or lookup fails
        """
        try:

            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.unicode_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                # single uncached read; may also yield the raw content,
                # which is reused below when `content` was requested
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_unicode(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_unicode(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data
710 710
711 711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
712 712 """
713 713 Fetch node tree for usage in full text search
714 714 """
715 715
716 716 tree_info = list()
717 717
718 718 try:
719 719 _repo = self._get_repo(repo_name)
720 720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721 721 root_path = root_path.lstrip('/')
722 722 for __, dirs, files in commit.walk(root_path):
723 723
724 724 for f in files:
725 725 is_binary, md5, size, _content = f.metadata_uncached()
726 726 _data = {
727 727 "name": f.unicode_path,
728 728 "md5": md5,
729 729 "extension": f.extension,
730 730 "binary": is_binary,
731 731 "size": size
732 732 }
733 733
734 734 tree_info.append(_data)
735 735
736 736 except RepositoryError:
737 737 log.exception("Exception in get_nodes")
738 738 raise
739 739
740 740 return tree_info
741 741
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; if empty this is treated as the
            initial commit
        :param author: author of commit, can be different than the committer
            (git only)
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # NOTE(review): the lookup below uses the *sanitized* path as the
            # dict key; assumes _sanitize_path is a no-op for valid keys — verify
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                # file-like content is read fully into memory
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip
811 811
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits modifications (add/mod/del, including renames) of multiple
        nodes into `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod',
            'mode': optional file mode}, ...}
        :param parent_commit: parent commit; if empty this is treated as the
            initial commit
        :param author: author of commit, can be different than the committer
            (git only)
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        :raises IMCCommitError: when the in-memory commit fails unexpectedly
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitize
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            # a no-op modification is propagated to the caller unchanged
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip
876 876
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; if empty this is treated as the
            initial commit
        :param author: author of commit, can be different than the committer
            (git only)
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatibility it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we're editing an empty (new) repository
            parents = None
        else:
            parents = [parent_commit]
        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
937 937
    def strip(self, repo, commit_id, branch):
        """
        Strip (remove) the given commit from `repo` on the given branch and
        invalidate the repository caches afterwards.
        """
        scm_instance = repo.scm_instance(cache=False)
        # hooks are cleared so stripping does not fire push/pull callbacks
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        # NOTE(review): this returns the total count of UserLog rows; the
        # name suggests only *unread* entries — confirm intent with callers.
        return self.sa.query(UserLog).count()
946 946
947 947 @classmethod
948 948 def backend_landing_ref(cls, repo_type):
949 949 """
950 950 Return a default landing ref based on a repository type.
951 951 """
952 952
953 953 landing_ref = {
954 954 'hg': ('branch:default', 'default'),
955 955 'git': ('branch:master', 'master'),
956 956 'svn': ('rev:tip', 'latest tip'),
957 957 'default': ('rev:tip', 'latest tip'),
958 958 }
959 959
960 960 return landing_ref.get(repo_type) or landing_ref['default']
961 961
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param translator: translation callable (bound to ``_``)
        :param repo: repository (db object / name / id); when missing, only
            the backend defaults for a NEW repository are returned
        :returns: tuple (choices, ref_options) for a grouped select widget
        """
        # NOTE(review): GitRepository import appears unused — confirm before removing
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        # the default landing ref is always the first plain (ungrouped) option
        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or without maybe a branch?
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options
1025 1025
1026 1026 def get_server_info(self, environ=None):
1027 1027 server_info = get_system_info(environ)
1028 1028 return server_info
@@ -1,38 +1,38 b''
## -*- coding: utf-8 -*-
## Plain-text body describing one commit for feed rendering: author line,
## branch / bookmarks / tags, a link to the commit, and the (optionally
## truncated) diff summary plus raw patch.

${_('%(user)s committed on %(date)s UTC') % {
'user': h.person(commit.author),
'date': h.format_date(commit.date)
}}
<br/>
% if commit.branch:
branch: ${commit.branch} <br/>
% endif

% for bookmark in getattr(commit, 'bookmarks', []):
bookmark: ${bookmark} <br/>
% endfor

% for tag in commit.tags:
tag: ${tag} <br/>
% endfor

% if has_hidden_changes:
Has hidden changes<br/>
% endif

commit: <a href="${h.route_url('repo_commit', repo_name=c.rhodecode_db_repo.repo_name, commit_id=commit.raw_id)}">${h.show_id(commit)}</a>
<pre>
${h.urlify_commit_message(commit.message)}

% for change in parsed_diff:
% if limited_diff:
${_('Commit was too big and was cut off...')}
% endif
${change['operation']} ${change['filename']} ${'(%(added)s lines added, %(removed)s lines removed)' % {'added': change['stats']['added'], 'removed': change['stats']['deleted']}}
% endfor

% if feed_include_diff:
${c.path_filter.get_raw_patch(diff_processor)}
% endif
</pre>
@@ -1,1840 +1,1842 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 import logging
33 34
34 35 import mock
35 36 import pyramid.testing
36 37 import pytest
37 38 import colander
38 39 import requests
39 40 import pyramid.paster
40 41
41 42 import rhodecode
42 43 from rhodecode.lib.utils2 import AttributeDict
43 44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 45 from rhodecode.model.comment import CommentsModel
45 46 from rhodecode.model.db import (
46 47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 49 from rhodecode.model.meta import Session
49 50 from rhodecode.model.pull_request import PullRequestModel
50 51 from rhodecode.model.repo import RepoModel
51 52 from rhodecode.model.repo_group import RepoGroupModel
52 53 from rhodecode.model.user import UserModel
53 54 from rhodecode.model.settings import VcsSettingsModel
54 55 from rhodecode.model.user_group import UserGroupModel
55 56 from rhodecode.model.integration import IntegrationModel
56 57 from rhodecode.integrations import integration_type_registry
57 58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 59 from rhodecode.lib.utils import repo2db_mapper
59 60 from rhodecode.lib.vcs.backends import get_backend
60 61 from rhodecode.lib.vcs.nodes import FileNode
61 62 from rhodecode.tests import (
62 63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 65 TEST_USER_REGULAR_PASS)
65 66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 67 from rhodecode.tests.fixture import Fixture
67 68 from rhodecode.config import utils as config_utils
68 69
70 log = logging.getLogger(__name__)
69 71
70 72 def _split_comma(value):
71 73 return value.split(',')
72 74
73 75
def pytest_addoption(parser):
    """Register the RhodeCode specific command line options with pytest."""
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
109 111
110 112
def pytest_configure(config):
    # NOTE(review): imported only for its import-time side effects —
    # presumably applies global monkey patches early; confirm in
    # rhodecode.config.patches before removing.
    from rhodecode.config import patches
113 115
114 116
def pytest_collection_modifyitems(session, config, items):
    """
    Drop collected items explicitly marked as non-tests (``__test__ = False``,
    a nose-era convention kept for the nose-to-pytest transition) and reorder
    the remaining items so the slow database and vcs-operations tests run
    last, giving faster feedback on the rest of the suite.
    """
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]

    def _ordering(item):
        # 0 = regular tests, 1 = database tests, 2 = vcs operation tests
        node_id = item._nodeid
        if node_id.startswith('rhodecode/tests/database'):
            return 1
        if node_id.startswith('rhodecode/tests/vcs_operations'):
            return 2
        return 0

    # list.sort is stable, so relative order within each bucket is kept
    items.sort(key=_ordering)
134 136
135 137
def pytest_generate_tests(metafunc):
    """
    Parametrize tests on ``backend_alias`` according to the ``--backends``
    option, skipping tests whose supported backends were not selected.
    """

    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            # skip at collection time — no selected backend supports this test
            pytest.skip("Not enabled for any of selected backends")

        metafunc.parametrize('backend_alias', backends, scope=scope)

    # also skip marker-only tests (no backend_alias fixture) whose marked
    # backends were not requested via --backends
    backend_mark = metafunc.definition.get_closest_marker('backends')
    if backend_mark:
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
152 154
153 155
def get_backends_from_metafunc(metafunc):
    """
    Compute the backend aliases a test should run with: the intersection of
    the backends requested via ``--backends`` and the backends the test
    supports (via ``pytest.mark.backends``, a legacy ``backend_alias`` class
    attribute, or — by default — everything requested).
    """
    requested_backends = set(metafunc.config.getoption('--backends'))
    backend_mark = metafunc.definition.get_closest_marker('backends')

    if backend_mark:
        # Explicit pytest.mark.backends(...) wins
        supported = backend_mark.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Legacy style: class attribute instead of a marker
        supported = [metafunc.cls.backend_alias]
    else:
        supported = metafunc.config.getoption('--backends')

    return requested_backends.intersection(supported)
168 170
169 171
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # record of hook invocations; inspected via the capture_rcextensions fixture
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        # restore the real extensions module after the test session
        rhodecode.EXTENSIONS = old_extensions
184 186
185 187
@pytest.fixture()
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls
197 199
198 200
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return plain_http_environ()
205 207
206 208
def plain_http_host_stub():
    """Host:port value used as HTTP_HOST during the test run."""
    host, port = 'example.com', 80
    return '%s:%d' % (host, port)
212 214
213 215
@pytest.fixture()
def http_host_stub():
    """
    Value of HTTP_HOST (host:port) in the test run.
    """
    return plain_http_host_stub()


def plain_http_host_only_stub():
    """
    Host part (without port) of HTTP_HOST in the test run.
    """
    return plain_http_host_stub().split(':')[0]


@pytest.fixture()
def http_host_only_stub():
    """
    Host part (without port) of HTTP_HOST in the test run.
    """
    return plain_http_host_only_stub()
235 237
236 238
def plain_http_environ():
    """
    Extra WSGI environ keys used by the test application and for setting up
    the pylons environment. The "app" fixture allows overriding these per
    test case.
    """
    host_with_port = plain_http_host_stub()
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': host_with_port.split(':')[1],
        'HTTP_HOST': host_with_port,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET',
    }
252 254
253 255
@pytest.fixture()
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()
264 266
265 267
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """Session-scoped pyramid application built from the test ini file."""
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    log.info("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app
278 280
279 281
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """Per-test WSGI test app wrapping the session-scoped pyramid app."""
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        # expose as `self.app` for class-based tests
        request.cls.app = app
    return app
288 290
289 291
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
299 301
300 302
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    """Initialize the database connection for the test session."""
    config_utils.initialize_database(ini_settings)
305 307
306 308
# (csrf_token, user) pair describing a freshly logged-in session
LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    """Log the given credentials in and return the session's LoginData."""
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])
314 316
315 317
@pytest.fixture()
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture()
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token of the auto-logged-in admin user's session."""
    return autologin_user.csrf_token
336 338
337 339
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra request header that makes a call look like an XHR request."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
341 343
342 344
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
352 354
353 355
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Search index location from the app settings; also set on the test class."""
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location
360 362
361 363
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    # the directory is only preserved when --keep-tmp-path was passed
    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH
376 378
377 379
@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # timestamp suffix keeps the group name unique across tests
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # timestamp suffix keeps the group name unique across tests
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group
410 412
411 413
@pytest.fixture(scope='session')
def test_repo(request):
    """Session-scoped container of read-only test repositories."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
417 419
418 420
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # maps backend alias -> helper that unpacks a repository dump onto disk
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        # repository names to destroy during _cleanup
        self._cleanup_repos = []
        self._fixture = Fixture()
        # cache: (dump_name, backend_alias) -> repo_id
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        # create the repository on first request, then serve it from cache
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        # extract the dump to disk and register the repository in the database
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # destroy repositories in reverse creation order
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
467 469
468 470
def backend_base(request, backend_alias, baseapp, test_repo):
    """Build a `Backend` helper for *backend_alias*, honouring CLI selection."""
    selected = request.config.getoption('--backends')
    if backend_alias not in selected:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    # Apply per-test xfail/skip backend markers before doing any work.
    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    instance = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(instance.cleanup)
    return instance
484 486
485 487
@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Backend fixture parametrized over every selected backend alias.

    Honours the ``--backends`` command line option to focus a run on
    specific implementations, and supports ``pytest.mark.xfail_backends``
    to mark tests as failing for individual backends — useful while a new
    backend implementation is still maturing.
    """
    configured = backend_base(request, backend_alias, baseapp, test_repo)
    return configured
499 501
500 502
# Alias fixtures bound to one concrete backend each. They still go through
# backend_base, which skips the test when that alias is not in --backends.
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    return backend_base(request, 'svn', baseapp, test_repo)
514 516
515 517
@pytest.fixture()
def backend_random(backend_git):
    """
    Fixture for tests that need *some* backend but do not care which one.

    A number of tests only need a backend so their code can run at all;
    parametrizing those over every backend (as `backend` does) would just
    burn time, so a single fixed implementation is handed out instead.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
531 533
532 534
@pytest.fixture()
def backend_stub(backend_git):
    """
    Stand-in backend for tests that merely need a placeholder.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
542 544
543 545
@pytest.fixture()
def repo_stub(backend_stub):
    """Hand out a freshly created placeholder repository."""
    return backend_stub.create_repo()
550 552
551 553
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # These three used to be class-level attributes; since `{}` is
        # mutable and the master-repo state is per test, that leaked state
        # between Backend instances. Keep them per-instance instead.
        self._master_repo = None
        self._master_repo_path = ''
        self._commit_ids = {}
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        """Return the shared read-only test repo `key` for this alias."""
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        # hooks would otherwise fire on this purely internal pull
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repo; the fork becomes the new "current" repo."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Reserve (and schedule cleanup of) the next generated repo name."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derived from the test name (sanitized) plus a running counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order (forks before their origin).
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
726 728
727 729
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """Build a `VcsBackend` helper for *backend_alias*, honouring CLI selection."""
    selected = request.config.getoption('--backends')
    if backend_alias not in selected:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    # Apply per-test xfail/skip backend markers before doing any work.
    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    helper = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(helper.cleanup)
    return helper
744 746
745 747
@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized vcs-level counterpart of the `backend` fixture.

    Same concept, but it works on the raw vcs layer and therefore does not
    provide model instances etc. Parameters are generated dynamically; see
    :func:`pytest_generate_tests` for how this works.
    """
    helper = vcsbackend_base(
        request, backend_alias, tests_tmp_path, baseapp, test_repo)
    return helper
758 760
759 761
# Alias fixtures bound to one concrete vcs backend each. They still go
# through vcsbackend_base, which skips the test when that alias is not
# selected via --backends.
@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
773 775
774 776
@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Placeholder vcs backend for tests that only need *some* backend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
783 785
784 786
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the raw vcs instance of the shared read-only repo `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a fresh vcs repository (optionally cloned from `_clone_repo`)
        and record it for cleanup. See `Backend.create_repo` for the meaning
        of `commits` / `number_of_commits`.
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            # consistency fix: use range like Backend.create_repo does
            # (xrange was used before, the iteration result is identical)
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve a fresh on-disk path and make it the "current" one."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derived from the test name (sanitized) plus a running counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
862 864
863 865
def _add_commits_to_repo(vcs_repo, commits):
    """
    Create the given `commits` (a sequence of dicts) in `vcs_repo`.

    Each dict may carry 'message', 'added', 'changed', 'removed',
    'parents', 'author', 'date' and 'branch' keys. Returns a mapping of
    commit message -> raw commit id; empty dict when `commits` is falsy.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of an earlier entry, so a
        # parent must appear before its child in `commits`.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # Guarantee that every commit touches at least one file.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        # `commit` is rebound from the input dict to the created commit.
        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
900 902
901 903
@pytest.fixture()
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
911 913
912 914
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # Public URL of the last served repository; None until `serve` is called.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Spawn an ``svnserve`` daemon rooted at *vcsrepo* and record it."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        server_process = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(server_process)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process spawned by this instance."""
        for server_process in self._cleanup_servers:
            server_process.terminate()
938 940
939 941
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Helper for model and functional tests built around one pull request.

    Yields an instance of :class:`PRTestUtility` with various utility
    methods around a single pull request; cleanup is registered as a
    finalizer. Uses `backend` and therefore inherits its parameterization.
    """
    helper = PRTestUtility(backend)
    request.addfinalizer(helper.cleanup)
    return helper
955 957
956 958
class PRTestUtility(object):
    """
    Stateful helper around a single pull request (used by `pr_util`).

    The first `create_pull_request` call builds source/target repositories
    and the pull request itself; subsequent calls return the same instance.
    """

    # Lazily populated state; the class-level defaults mean "not yet set".
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request under test.

        `commits`/`target_head`/`source_head`/`revisions` default to a
        minimal three-commit scenario when not given.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            # Both repos are derived from the master repo by pulling heads.
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an "approved" status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<raw_id>" reference for the given commit.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        return [

        ]

    def update_source_repository(self, head=None):
        # Pull one more head (default 'c3') into the source repository.
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repo and return its id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo and return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment on `file_path` at `line_no`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        # Patch the vcs settings lookup once, then only flip the value.
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1146 1148
1147 1149
@pytest.fixture()
def user_admin(baseapp):
    """
    The default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1155 1157
1156 1158
@pytest.fixture()
def user_regular(baseapp):
    """
    The default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1164 1166
1165 1167
@pytest.fixture()
def user_util(request, db_connection):
    """
    A `UserUtility` wired to the current test, with integrated cleanup.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1174 1176
1175 1177
1176 1178 # TODO: johbo: Split this up into utilities per domain or something similar
1177 1179 class UserUtility(object):
1178 1180
1179 1181 def __init__(self, test_name="test"):
1180 1182 self._test_name = self._sanitize_name(test_name)
1181 1183 self.fixture = Fixture()
1182 1184 self.repo_group_ids = []
1183 1185 self.repos_ids = []
1184 1186 self.user_ids = []
1185 1187 self.user_group_ids = []
1186 1188 self.user_repo_permission_ids = []
1187 1189 self.user_group_repo_permission_ids = []
1188 1190 self.user_repo_group_permission_ids = []
1189 1191 self.user_group_repo_group_permission_ids = []
1190 1192 self.user_user_group_permission_ids = []
1191 1193 self.user_group_user_group_permission_ids = []
1192 1194 self.user_permissions = []
1193 1195
1194 1196 def _sanitize_name(self, name):
1195 1197 for char in ['[', ']']:
1196 1198 name = name.replace(char, '_')
1197 1199 return name
1198 1200
1199 1201 def create_repo_group(
1200 1202 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1201 1203 group_name = "{prefix}_repogroup_{count}".format(
1202 1204 prefix=self._test_name,
1203 1205 count=len(self.repo_group_ids))
1204 1206 repo_group = self.fixture.create_repo_group(
1205 1207 group_name, cur_user=owner)
1206 1208 if auto_cleanup:
1207 1209 self.repo_group_ids.append(repo_group.group_id)
1208 1210 return repo_group
1209 1211
1210 1212 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1211 1213 auto_cleanup=True, repo_type='hg', bare=False):
1212 1214 repo_name = "{prefix}_repository_{count}".format(
1213 1215 prefix=self._test_name,
1214 1216 count=len(self.repos_ids))
1215 1217
1216 1218 repository = self.fixture.create_repo(
1217 1219 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1218 1220 if auto_cleanup:
1219 1221 self.repos_ids.append(repository.repo_id)
1220 1222 return repository
1221 1223
1222 1224 def create_user(self, auto_cleanup=True, **kwargs):
1223 1225 user_name = "{prefix}_user_{count}".format(
1224 1226 prefix=self._test_name,
1225 1227 count=len(self.user_ids))
1226 1228 user = self.fixture.create_user(user_name, **kwargs)
1227 1229 if auto_cleanup:
1228 1230 self.user_ids.append(user.user_id)
1229 1231 return user
1230 1232
1231 1233 def create_additional_user_email(self, user, email):
1232 1234 uem = self.fixture.create_additional_user_email(user=user, email=email)
1233 1235 return uem
1234 1236
1235 1237 def create_user_with_group(self):
1236 1238 user = self.create_user()
1237 1239 user_group = self.create_user_group(members=[user])
1238 1240 return user, user_group
1239 1241
1240 1242 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1241 1243 auto_cleanup=True, **kwargs):
1242 1244 group_name = "{prefix}_usergroup_{count}".format(
1243 1245 prefix=self._test_name,
1244 1246 count=len(self.user_group_ids))
1245 1247 user_group = self.fixture.create_user_group(
1246 1248 group_name, cur_user=owner, **kwargs)
1247 1249
1248 1250 if auto_cleanup:
1249 1251 self.user_group_ids.append(user_group.users_group_id)
1250 1252 if members:
1251 1253 for user in members:
1252 1254 UserGroupModel().add_user_to_group(user_group, user)
1253 1255 return user_group
1254 1256
1255 1257 def grant_user_permission(self, user_name, permission_name):
1256 1258 self.inherit_default_user_permissions(user_name, False)
1257 1259 self.user_permissions.append((user_name, permission_name))
1258 1260
1259 1261 def grant_user_permission_to_repo_group(
1260 1262 self, repo_group, user, permission_name):
1261 1263 permission = RepoGroupModel().grant_user_permission(
1262 1264 repo_group, user, permission_name)
1263 1265 self.user_repo_group_permission_ids.append(
1264 1266 (repo_group.group_id, user.user_id))
1265 1267 return permission
1266 1268
1267 1269 def grant_user_group_permission_to_repo_group(
1268 1270 self, repo_group, user_group, permission_name):
1269 1271 permission = RepoGroupModel().grant_user_group_permission(
1270 1272 repo_group, user_group, permission_name)
1271 1273 self.user_group_repo_group_permission_ids.append(
1272 1274 (repo_group.group_id, user_group.users_group_id))
1273 1275 return permission
1274 1276
1275 1277 def grant_user_permission_to_repo(
1276 1278 self, repo, user, permission_name):
1277 1279 permission = RepoModel().grant_user_permission(
1278 1280 repo, user, permission_name)
1279 1281 self.user_repo_permission_ids.append(
1280 1282 (repo.repo_id, user.user_id))
1281 1283 return permission
1282 1284
1283 1285 def grant_user_group_permission_to_repo(
1284 1286 self, repo, user_group, permission_name):
1285 1287 permission = RepoModel().grant_user_group_permission(
1286 1288 repo, user_group, permission_name)
1287 1289 self.user_group_repo_permission_ids.append(
1288 1290 (repo.repo_id, user_group.users_group_id))
1289 1291 return permission
1290 1292
1291 1293 def grant_user_permission_to_user_group(
1292 1294 self, target_user_group, user, permission_name):
1293 1295 permission = UserGroupModel().grant_user_permission(
1294 1296 target_user_group, user, permission_name)
1295 1297 self.user_user_group_permission_ids.append(
1296 1298 (target_user_group.users_group_id, user.user_id))
1297 1299 return permission
1298 1300
1299 1301 def grant_user_group_permission_to_user_group(
1300 1302 self, target_user_group, user_group, permission_name):
1301 1303 permission = UserGroupModel().grant_user_group_permission(
1302 1304 target_user_group, user_group, permission_name)
1303 1305 self.user_group_user_group_permission_ids.append(
1304 1306 (target_user_group.users_group_id, user_group.users_group_id))
1305 1307 return permission
1306 1308
1307 1309 def revoke_user_permission(self, user_name, permission_name):
1308 1310 self.inherit_default_user_permissions(user_name, True)
1309 1311 UserModel().revoke_perm(user_name, permission_name)
1310 1312
1311 1313 def inherit_default_user_permissions(self, user_name, value):
1312 1314 user = UserModel().get_by_username(user_name)
1313 1315 user.inherit_default_permissions = value
1314 1316 Session().add(user)
1315 1317 Session().commit()
1316 1318
1317 1319 def cleanup(self):
1318 1320 self._cleanup_permissions()
1319 1321 self._cleanup_repos()
1320 1322 self._cleanup_repo_groups()
1321 1323 self._cleanup_user_groups()
1322 1324 self._cleanup_users()
1323 1325
1324 1326 def _cleanup_permissions(self):
1325 1327 if self.user_permissions:
1326 1328 for user_name, permission_name in self.user_permissions:
1327 1329 self.revoke_user_permission(user_name, permission_name)
1328 1330
1329 1331 for permission in self.user_repo_permission_ids:
1330 1332 RepoModel().revoke_user_permission(*permission)
1331 1333
1332 1334 for permission in self.user_group_repo_permission_ids:
1333 1335 RepoModel().revoke_user_group_permission(*permission)
1334 1336
1335 1337 for permission in self.user_repo_group_permission_ids:
1336 1338 RepoGroupModel().revoke_user_permission(*permission)
1337 1339
1338 1340 for permission in self.user_group_repo_group_permission_ids:
1339 1341 RepoGroupModel().revoke_user_group_permission(*permission)
1340 1342
1341 1343 for permission in self.user_user_group_permission_ids:
1342 1344 UserGroupModel().revoke_user_permission(*permission)
1343 1345
1344 1346 for permission in self.user_group_user_group_permission_ids:
1345 1347 UserGroupModel().revoke_user_group_permission(*permission)
1346 1348
1347 1349 def _cleanup_repo_groups(self):
1348 1350 def _repo_group_compare(first_group_id, second_group_id):
1349 1351 """
1350 1352 Gives higher priority to the groups with the most complex paths
1351 1353 """
1352 1354 first_group = RepoGroup.get(first_group_id)
1353 1355 second_group = RepoGroup.get(second_group_id)
1354 1356 first_group_parts = (
1355 1357 len(first_group.group_name.split('/')) if first_group else 0)
1356 1358 second_group_parts = (
1357 1359 len(second_group.group_name.split('/')) if second_group else 0)
1358 1360 return cmp(second_group_parts, first_group_parts)
1359 1361
1360 1362 sorted_repo_group_ids = sorted(
1361 1363 self.repo_group_ids, cmp=_repo_group_compare)
1362 1364 for repo_group_id in sorted_repo_group_ids:
1363 1365 self.fixture.destroy_repo_group(repo_group_id)
1364 1366
1365 1367 def _cleanup_repos(self):
1366 1368 sorted_repos_ids = sorted(self.repos_ids)
1367 1369 for repo_id in sorted_repos_ids:
1368 1370 self.fixture.destroy_repo(repo_id)
1369 1371
1370 1372 def _cleanup_user_groups(self):
1371 1373 def _user_group_compare(first_group_id, second_group_id):
1372 1374 """
1373 1375 Gives higher priority to the groups with the most complex paths
1374 1376 """
1375 1377 first_group = UserGroup.get(first_group_id)
1376 1378 second_group = UserGroup.get(second_group_id)
1377 1379 first_group_parts = (
1378 1380 len(first_group.users_group_name.split('/'))
1379 1381 if first_group else 0)
1380 1382 second_group_parts = (
1381 1383 len(second_group.users_group_name.split('/'))
1382 1384 if second_group else 0)
1383 1385 return cmp(second_group_parts, first_group_parts)
1384 1386
1385 1387 sorted_user_group_ids = sorted(
1386 1388 self.user_group_ids, cmp=_user_group_compare)
1387 1389 for user_group_id in sorted_user_group_ids:
1388 1390 self.fixture.destroy_user_group(user_group_id)
1389 1391
1390 1392 def _cleanup_users(self):
1391 1393 for user_id in self.user_ids:
1392 1394 self.fixture.destroy_user(user_id)
1393 1395
1394 1396
1395 1397 # TODO: Think about moving this into a pytest-pyro package and make it a
1396 1398 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Attach the VCSServer remote traceback to the test report, if present.

    VCSServer stores that traceback on the raised exception instance as
    the ``_vcs_server_traceback`` attribute.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)
1409 1411
1410 1412
1411 1413 def _add_vcsserver_remote_traceback(report, exc):
1412 1414 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1413 1415
1414 1416 if vcsserver_traceback:
1415 1417 section = 'VCSServer remote traceback ' + report.when
1416 1418 report.sections.append((section, vcsserver_traceback))
1417 1419
1418 1420
@pytest.fixture(scope='session')
def testrun():
    """Identify this test run: a uuid plus ISO start time and unix timestamp."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1426 1428
1427 1429
class AppenlightClient(object):
    """
    Collects test metrics/tags and ships them to an Appenlight server.

    Payloads are queued through :meth:`collect` and sent in one POST by
    :meth:`send_stats` using the 0.5 protocol.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}  # metric values sampled before the measured action
        self.tags_after = {}   # metric values sampled after the measured action
        self.stats = []        # payloads queued for sending
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a metric value sampled before the measured action."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a metric value sampled after the measured action."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue a stats payload, filling in common default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """
        POST all queued payloads to Appenlight.

        Raises Exception when the server does not answer with HTTP 200.
        """
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # A delta only exists when the same tag was sampled after the
                # test (KeyError otherwise) and both values support
                # subtraction (TypeError otherwise); skip it in either case.
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except (KeyError, TypeError):
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        # was `if not response.status_code == 200` -- same check, idiomatic form
        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1497 1499
1498 1500
@pytest.fixture()
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    # Destroy every created gist once the test is done.
    request.addfinalizer(util.cleanup)
    return util
1507 1509
1508 1510
class GistUtility(object):
    """Creates gists through the test fixture and removes them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []  # ids of every gist created via create_gist()

    def create_gist(self, **kwargs):
        """Create a gist and register it for later destruction."""
        created = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(created.gist_id)
        return created

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1522 1524
1523 1525
@pytest.fixture()
def enabled_backends(request):
    """Return a copy of the backend names enabled for this test run."""
    return list(request.config.option.backends)
1528 1530
1529 1531
@pytest.fixture()
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    # Delete every created setting row once the test is done.
    request.addfinalizer(util.cleanup)
    return util
1538 1540
1539 1541
class SettingsUtility(object):
    """Creates RhodeCode ui/setting database rows and deletes them on cleanup."""

    def __init__(self):
        # ids of rows created with cleanup=True, grouped per model
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Insert a per-repository ui row; track it for cleanup when requested."""
        # Derive a unique key from section/value/repo when none was given.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        row = RepoRhodeCodeUi()
        row.repository_id = repo.repo_id
        row.ui_section = section
        row.ui_value = value
        row.ui_key = key
        row.ui_active = active
        Session().add(row)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(row.ui_id)
        return row

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Insert a global ui row; track it for cleanup when requested."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        row = RhodeCodeUi()
        row.ui_section = section
        row.ui_value = value
        row.ui_key = key
        row.ui_active = active
        Session().add(row)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(row.ui_id)
        return row

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert a per-repository setting; track it for cleanup when requested."""
        row = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(row)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(row.app_settings_id)
        return row

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert a global setting; track it for cleanup when requested."""
        row = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(row)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(row.app_settings_id)
        return row

    def cleanup(self):
        """Delete every tracked row, committing once at the end."""
        tracked = (
            (RhodeCodeUi, self.rhodecode_ui_ids),
            (RhodeCodeSetting, self.rhodecode_setting_ids),
            (RepoRhodeCodeUi, self.repo_rhodecode_ui_ids),
            (RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids),
        )
        for model, row_ids in tracked:
            for row_id in row_ids:
                Session().delete(model.get(row_id))

        Session().commit()
1620 1622
1621 1623
@pytest.fixture()
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1628 1630
1629 1631
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1639 1641
1640 1642
@pytest.fixture()
def rhodecode_fixtures():
    """Fresh `Fixture` helper for creating test data."""
    return Fixture()
1644 1646
1645 1647
@pytest.fixture()
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1653 1655
1654 1656
@pytest.fixture()
def request_stub():
    """
    Stub request object.
    """
    # Imported lazily to avoid pulling the app stack in at collection time.
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1663 1665
1664 1666
@pytest.fixture()
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    configurator = bootstrap_config(request=request_stub)

    # Always tear pyramid.testing down, even when the test fails.
    request.addfinalizer(pyramid.testing.tearDown)

    return configurator
1678 1680
1679 1681
@pytest.fixture()
def StubIntegrationType():
    """Register and return a minimal integration type used by the tests."""
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            # Events are captured here instead of being delivered anywhere.
            self.sent_events = []

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1716 1718
@pytest.fixture()
def stub_integration_settings():
    """Valid settings payload for the stub integration type's schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1723 1725
1724 1726
@pytest.fixture()
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1738 1740
1739 1741
@pytest.fixture()
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group stub integration (direct children only), deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1753 1755
1754 1756
@pytest.fixture()
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    """Repo-group stub integration applying recursively, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1768 1770
1769 1771
@pytest.fixture()
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally scoped stub integration, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1783 1785
1784 1786
@pytest.fixture()
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Global stub integration restricted to root repos, deleted after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1798 1800
1799 1801
@pytest.fixture()
def local_dt_to_utc():
    """Factory converting a naive local datetime into a naive UTC datetime."""
    def _convert(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _convert
1806 1808
1807 1809
@pytest.fixture()
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for the test and restore it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1815 1817
1816 1818
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped `Fixture` helper for creating test data."""
    return Fixture()
1820 1822
1821 1823
@pytest.fixture()
def repo_groups(request):
    """Create a zombie group plus a parent/child pair; destroyed after the test."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # Sanity check: exactly our three groups exist and are linked correctly.
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        # Child must be destroyed before its parent.
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
@@ -1,285 +1,285 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import platform
23 23 import socket
24 24 import random
25 25 import pytest
26 26
27 27 from rhodecode.lib.pyramid_utils import get_app_config
28 28 from rhodecode.tests.fixture import TestINI
29 29 from rhodecode.tests.server_utils import RcVCSServer
30 30
31 31
32 32 def _parse_json(value):
33 33 return json.loads(value) if value else None
34 34
35 35
def pytest_addoption(parser):
    """Register RhodeCode-specific command line options and INI keys."""
    # Logging level used by _use_log_level(); defaults to critical elsewhere.
    parser.addoption(
        '--test-loglevel', dest='test_loglevel',
        help="Set default Logging level for tests, critical(default), error, warn , info, debug")
    group = parser.getgroup('pylons')
    group.addoption(
        '--with-pylons', dest='pyramid_config',
        help="Set up a Pylons environment with the specified config file.")
    group.addoption(
        '--ini-config-override', action='store', type=_parse_json,
        default=None, dest='pyramid_config_override', help=(
            "Overrides the .ini file settings. Should be specified in JSON"
            " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
        )
    )
    parser.addini(
        'pyramid_config',
        "Set up a Pyramid environment with the specified config file.")

    # VCSServer-related options live in their own group.
    vcsgroup = parser.getgroup('vcs')
    vcsgroup.addoption(
        '--without-vcsserver', dest='with_vcsserver', action='store_false',
        help="Do not start the VCSServer in a background process.")
    vcsgroup.addoption(
        '--with-vcsserver-http', dest='vcsserver_config_http',
        help="Start the HTTP VCSServer with the specified config file.")
    vcsgroup.addoption(
        '--vcsserver-protocol', dest='vcsserver_protocol',
        help="Start the VCSServer with HTTP protocol support.")
    vcsgroup.addoption(
        '--vcsserver-config-override', action='store', type=_parse_json,
        default=None, dest='vcsserver_config_override', help=(
            "Overrides the .ini file settings for the VCSServer. "
            "Should be specified in JSON "
            "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
        )
    )
    vcsgroup.addoption(
        '--vcsserver-port', action='store', type=int,
        default=None, help=(
            "Allows to set the port of the vcsserver. Useful when testing "
            "against an already running server and random ports cause "
            "trouble."))
    parser.addini(
        'vcsserver_config_http',
        "Start the HTTP VCSServer with the specified config file.")
    parser.addini(
        'vcsserver_protocol',
        "Start the VCSServer with HTTP protocol support.")
85 85
86 86
@pytest.fixture(scope='session')
def vcsserver(request, vcsserver_port, vcsserver_factory):
    """
    Session scope VCSServer.

    Tests wich need the VCSServer have to rely on this fixture in order
    to ensure it will be running.

    For specific needs, the fixture vcsserver_factory can be used. It allows to
    adjust the configuration file for the test run.

    Command line args:

    --without-vcsserver: Allows to switch this fixture off. You have to
    manually start the server.

    --vcsserver-port: Will expect the VCSServer to listen on this port.
    """
    if not request.config.getoption('with_vcsserver'):
        # Explicitly disabled via --without-vcsserver.
        return None

    return vcsserver_factory(request, vcsserver_port=vcsserver_port)
111 111
112 112
@pytest.fixture(scope='session')
def vcsserver_factory(tmpdir_factory):
    """
    Use this if you need a running vcsserver with a special configuration.
    """

    def factory(request, overrides=(), vcsserver_port=None,
                log_file=None):
        """Start a VCSServer; shut it down when the requesting scope ends."""
        if vcsserver_port is None:
            vcsserver_port = get_available_port()

        # The chosen port is always forced into the generated config.
        effective_overrides = list(overrides)
        effective_overrides.append({'server:main': {'port': vcsserver_port}})

        config_file = get_config(
            request.config, option_name='vcsserver_config_http',
            override_option_name='vcsserver_config_override',
            overrides=effective_overrides,
            basetemp=tmpdir_factory.getbasetemp().strpath,
            prefix='test_vcs_')

        server = RcVCSServer(config_file, log_file)
        server.start()

        # Register shutdown before waiting, so a failed startup still cleans up.
        request.addfinalizer(server.shutdown)

        server.wait_until_ready()
        return server

    return factory
147 147
148 148
def is_cygwin():
    """Return True when the test process runs under Cygwin."""
    system_name = platform.system().lower()
    return 'cygwin' in system_name
151 151
152 152
153 153 def _use_log_level(config):
154 154 level = config.getoption('test_loglevel') or 'critical'
155 155 return level.upper()
156 156
157 157
@pytest.fixture(scope='session')
def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
    """Generate the test INI file, wiring in the ports chosen for this run."""
    option_name = 'pyramid_config'
    log_level = _use_log_level(request.config)

    overrides = [
        {'server:main': {'port': rcserver_port}},
        {'app:main': {
            'vcs.server': 'localhost:%s' % vcsserver_port,
            # johbo: We will always start the VCSServer on our own based on the
            # fixtures of the test cases. For the test run it must always be
            # off in the INI file.
            'vcs.start_server': 'false',

            'vcs.server.protocol': 'http',
            'vcs.scm_app_implementation': 'http',
            'vcs.hooks.protocol': 'http',
            'vcs.hooks.host': '127.0.0.1',
        }},

        {'handler_console': {
            # NOTE(review): 'class ' and 'args ' keys carry a trailing space —
            # presumably required by the INI override merging; confirm before
            # "fixing".
            'class ': 'StreamHandler',
            'args ': '(sys.stderr,)',
            'level': log_level,
        }},

    ]

    filename = get_config(
        request.config, option_name=option_name,
        override_option_name='{}_override'.format(option_name),
        overrides=overrides,
        basetemp=tmpdir_factory.getbasetemp().strpath,
        prefix='test_rce_')
    return filename
193 193
194 194
@pytest.fixture(scope='session')
def ini_settings(ini_config):
    """Parsed application settings loaded from the generated test INI file."""
    return get_app_config(ini_config)
199 199
200 200
def get_available_port(min_port=40000, max_port=55555):
    """Return a free TCP port within ``[min_port, max_port]``."""
    # Imported lazily to keep this module importable without the app stack.
    from rhodecode.lib.utils2 import get_available_port as _find_port
    return _find_port(min_port, max_port)
204 204
205 205
@pytest.fixture(scope='session')
def rcserver_port(request):
    """Session-wide free port for the RhodeCode test server."""
    port = get_available_port()
    print('Using rhodecode port {}'.format(port))
    return port
211 211
212 212
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """Port for the VCSServer: the --vcsserver-port override or a free one."""
    configured = request.config.getoption('--vcsserver-port')
    port = get_available_port() if configured is None else configured
    print('Using vcsserver port {}'.format(port))
    return port
220 220
221 221
@pytest.fixture(scope='session')
def available_port_factory():
    """
    Returns a callable which returns free port numbers.
    """
    return get_available_port
228 228
229 229
@pytest.fixture()
def available_port(available_port_factory):
    """
    Gives you one free port for the current test.

    Uses "available_port_factory" to retrieve the port.
    """
    return available_port_factory()
238 238
239 239
@pytest.fixture(scope='session')
def testini_factory(tmpdir_factory, ini_config):
    """
    Factory to create an INI file based on TestINI.

    It will make sure to place the INI file in the correct directory.
    """
    base_dir = tmpdir_factory.getbasetemp().strpath
    return TestIniFactory(base_dir, ini_config)
249 249
250 250
class TestIniFactory(object):
    """Callable that derives new INI files from a template inside basetemp."""

    def __init__(self, basetemp, template_ini):
        self._basetemp = basetemp          # directory for generated files
        self._template_ini = template_ini  # template the overrides apply to

    def __call__(self, ini_params, new_file_prefix='test'):
        generated = TestINI(
            self._template_ini, ini_params=ini_params,
            new_file_prefix=new_file_prefix, dir=self._basetemp)
        return generated.create()
263 263
264 264
def get_config(
        config, option_name, override_option_name, overrides=None,
        basetemp=None, prefix='test'):
    """
    Find a configuration file and apply overrides for the given `prefix`.
    """
    config_file = config.getoption(option_name) or config.getini(option_name)
    if not config_file:
        pytest.exit(
            "Configuration error, could not extract {}.".format(option_name))

    effective_overrides = overrides or []
    cli_override = config.getoption(override_option_name)
    if cli_override:
        effective_overrides.append(cli_override)

    temp_ini = TestINI(
        config_file, ini_params=effective_overrides, new_file_prefix=prefix,
        dir=basetemp)
    return temp_ini.create()
@@ -1,197 +1,200 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import os
23 23 import time
24 24 import tempfile
25 25 import pytest
26 26 import subprocess32
27 27 import configobj
28 import logging
28 29
29 30 from urllib2 import urlopen, URLError
30 31 from pyramid.compat import configparser
31 32
32 33
33 34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
34 35 from rhodecode.tests.utils import is_url_reachable
35 36
37 log = logging.getLogger(__name__)
38
36 39
def get_port(pyramid_config):
    """Read the HTTP port from the ``server:main`` section of an INI file."""
    parser = configparser.ConfigParser()
    parser.read(pyramid_config)
    return parser.get('server:main', 'port')
41 44
42 45
def get_host_url(pyramid_config):
    """Construct the host url using the port in the test configuration."""
    port = get_port(pyramid_config)
    return '127.0.0.1:%s' % port
46 49
47 50
def assert_no_running_instance(url):
    """Fail the test run when something already answers at `url`."""
    if not is_url_reachable(url):
        return
    print("Hint: Usually this means another instance of server "
          "is running in the background at %s." % url)
    pytest.fail(
        "Port is not free at %s, cannot start server at" % url)
54 57
55 58
class ServerBase(object):
    """
    Base class for running a server process as a test fixture.

    Subclasses define ``_args`` (command line), ``log_file_name`` and
    ``status_url_tmpl`` (the url polled until the server responds).
    """
    _args = []
    log_file_name = 'NOT_DEFINED.log'
    status_url_tmpl = 'http://{host}:{port}'

    def __init__(self, config_file, log_file):
        """
        :param config_file: path to the INI file driving the server.
        :param log_file: file to capture server output; falls back to
            ``log_file_name`` in the system temp directory.
        :raises RuntimeError: when `config_file` does not exist.
        """
        self.config_file = config_file
        config_data = configobj.ConfigObj(config_file)
        self._config = config_data['server:main']

        self._args = []
        self.log_file = log_file or os.path.join(
            tempfile.gettempdir(), self.log_file_name)
        self.process = None
        self.server_out = None
        # lazy %-style args: formatting happens only if the level is enabled
        log.info("Using the %s configuration:%s",
                 self.__class__.__name__, config_file)

        if not os.path.isfile(config_file):
            raise RuntimeError('Failed to get config at {}'.format(config_file))

    @property
    def command(self):
        """Full command line used to launch the server."""
        return ' '.join(self._args)

    @property
    def http_url(self):
        """Base http url, host/port taken from the INI ``server:main``."""
        template = 'http://{host}:{port}/'
        return template.format(**self._config)

    def host_url(self):
        """Http url built from the port stored in the config file."""
        return 'http://' + get_host_url(self.config_file)

    def get_rc_log(self):
        """Return the current content of the server log file."""
        with open(self.log_file) as f:
            return f.read()

    def wait_until_ready(self, timeout=30):
        """Poll the status url until the server answers or `timeout` expires."""
        host = self._config['host']
        port = self._config['port']
        status_url = self.status_url_tmpl.format(host=host, port=port)
        start = time.time()

        while time.time() - start < timeout:
            try:
                urlopen(status_url)
                break
            except URLError:
                time.sleep(0.2)
        else:
            # while-else: reached only when the loop ran out of time
            pytest.fail(
                "Starting the {} failed or took more than {} "
                "seconds. cmd: `{}`".format(
                    self.__class__.__name__, timeout, self.command))

        log.info('Server of %s ready at url %s',
                 self.__class__.__name__, status_url)

    def shutdown(self):
        """Kill the subprocess and close the captured output stream."""
        self.process.kill()
        self.server_out.flush()
        self.server_out.close()

    def get_log_file_with_port(self):
        """Derive a per-port log name, e.g. ``rc-web.log`` -> ``rc-web5000.log``."""
        log_file = list(self.log_file.partition('.log'))
        log_file.insert(1, get_port(self.config_file))
        return ''.join(log_file)
124 127
125 128
class RcVCSServer(ServerBase):
    """
    Represents a running VCSServer instance.
    """

    log_file_name = 'rc-vcsserver.log'
    status_url_tmpl = 'http://{host}:{port}/status'

    def __init__(self, config_file, log_file=None):
        super(RcVCSServer, self).__init__(config_file, log_file)
        self._args = ['gunicorn', '--paste', self.config_file]

    def start(self):
        """Spawn the vcsserver subprocess, capturing its output in log_file."""
        env = os.environ.copy()

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        assert_no_running_instance(host_url)

        # lazy %-style logging args instead of eager str.format
        log.info('rhodecode-vcsserver start command: %s', ' '.join(self._args))
        log.info('rhodecode-vcsserver starting at: %s', host_url)
        log.info('rhodecode-vcsserver command: %s', self.command)
        log.info('rhodecode-vcsserver logfile: %s', self.log_file)

        self.process = subprocess32.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)
155 158
156 159
class RcWebServer(ServerBase):
    """
    Represents a running RCE web server used as a test fixture.
    """

    log_file_name = 'rc-web.log'
    status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'

    def __init__(self, config_file, log_file=None):
        super(RcWebServer, self).__init__(config_file, log_file)
        self._args = [
            'gunicorn', '--worker-class', 'gevent', '--paste', config_file]

    def start(self):
        """Spawn the web server subprocess, capturing its output in log_file."""
        env = os.environ.copy()
        env['RC_NO_TMP_PATH'] = '1'

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        assert_no_running_instance(host_url)

        # lazy %-style logging args instead of eager str.format
        log.info('rhodecode-web starting at: %s', host_url)
        log.info('rhodecode-web command: %s', self.command)
        log.info('rhodecode-web logfile: %s', self.log_file)

        self.process = subprocess32.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)

    def repo_clone_url(self, repo_name, **kwargs):
        """Build an authenticated http clone url for `repo_name`."""
        params = {
            'user': TEST_USER_ADMIN_LOGIN,
            'passwd': TEST_USER_ADMIN_PASS,
            'host': get_host_url(self.config_file),
            'cloned_repo': repo_name,
        }
        params.update(**kwargs)
        _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
        return _url
@@ -1,193 +1,193 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base for test suite for making push/pull operations.
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30 from os.path import join as jn
31 31 from subprocess32 import Popen, PIPE
32 32 import logging
33 33 import os
34 34 import tempfile
35 35
36 36 from rhodecode.tests import GIT_REPO, HG_REPO
37 37
38 38 DEBUG = True
39 39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 40 REPO_GROUP = 'a_repo_group'
41 41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
class Command(object):
    """Helper to run shell commands from a fixed working directory."""

    def __init__(self, cwd):
        self.cwd = cwd
        self.process = None

    def execute(self, cmd, *args):
        """
        Runs command on the system with given ``args``.

        Returns a ``(stdout, stderr)`` tuple and keeps the finished
        process on ``self.process`` for return-code checks.
        """
        command = cmd + ' ' + ' '.join(args)
        if DEBUG:
            log.debug('*** CMD %s ***', command)

        env = dict(os.environ)
        # Delete coverage variables, as they make the test fail for Mercurial.
        # Iterate over a snapshot of the keys: on Python 3, deleting entries
        # while iterating the live keys view raises RuntimeError.
        for key in list(env.keys()):
            if key.startswith('COV_CORE_'):
                del env[key]

        self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
                             cwd=self.cwd, env=env)
        stdout, stderr = self.process.communicate()
        if DEBUG:
            log.debug('STDOUT:%s', stdout)
            log.debug('STDERR:%s', stderr)
        return stdout, stderr

    def assert_returncode_success(self):
        """Assert that the last executed command exited with status 0."""
        assert self.process.returncode == 0
78 78
79 79
def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
    """
    Create a few files in the working copy at `dest` and commit them.

    :param vcs: 'git' or 'hg', selects which commands are issued.
    :param dest: path of the working copy to operate in.
    :param tags: optional list of ``{'name': ..., 'commit': ...}`` dicts;
        each entry is created as a tag after the commits are made.
    :param kwargs: ``files_no`` controls how many commits are created (3).
    """
    git_ident = "git config user.name {} && git config user.email {}".format(
        'Marcin KuΕΊminski', 'me@email.com')
    cwd = path = jn(dest)

    tags = tags or []
    # next() builtin works on both Python 2.6+ and 3; the .next() method
    # the original used is Python-2 only.
    added_file = jn(path, '%s_setup.py' % next(tempfile._RandomNameSequence()))
    Command(cwd).execute('touch %s' % added_file)
    Command(cwd).execute('%s add %s' % (vcs, added_file))
    author_str = 'Marcin KuΕΊminski <me@email.com>'

    for i in range(kwargs.get('files_no', 3)):
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
        Command(cwd).execute(cmd)

        if vcs == 'hg':
            cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
                i, author_str, added_file
            )
        elif vcs == 'git':
            cmd = """%s && git commit -m 'committed new %s' %s""" % (
                git_ident, i, added_file)
        Command(cwd).execute(cmd)

    for tag in tags:
        if vcs == 'hg':
            Command(cwd).execute(
                'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
        elif vcs == 'git':
            if tag['commit']:
                # annotated tag
                _stdout, _stderr = Command(cwd).execute(
                    """%s && git tag -a %s -m "%s" """ % (
                        git_ident, tag['name'], tag['commit']))
            else:
                # lightweight tag
                _stdout, _stderr = Command(cwd).execute(
                    """%s && git tag %s""" % (
                        git_ident, tag['name']))
120 120
def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
                        new_branch=False, **kwargs):
    """
    Generate some files, add it to DEST repo and push back
    vcs is git or hg and defines what VCS we want to make those files for
    """
    git_ident = "git config user.name {} && git config user.email {}".format(
        'Marcin KuΕΊminski', 'me@email.com')
    cwd = path = jn(dest)

    # first create and commit the files inside the working copy
    _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)

    fallback_branches = {
        'git': 'master',
        'hg': 'default',
    }
    target_branch = target_branch or fallback_branches.get(vcs)

    # PUSH the changes back to the server
    stdout = stderr = None
    if vcs == 'hg':
        maybe_new_branch = '--new-branch' if new_branch else ''
        stdout, stderr = Command(cwd).execute(
            'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
        )
    elif vcs == 'git':
        stdout, stderr = Command(cwd).execute(
            """{} &&
            git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
        )

    return stdout, stderr
157 157
158 158
159 159 def _check_proper_git_push(
160 160 stdout, stderr, branch='master', should_set_default_branch=False):
161 161 # Note: Git is writing most information to stderr intentionally
162 162 assert 'fatal' not in stderr
163 163 assert 'rejected' not in stderr
164 164 assert 'Pushing to' in stderr
165 165 assert '%s -> %s' % (branch, branch) in stderr
166 166
167 167 if should_set_default_branch:
168 168 assert "Setting default branch to %s" % branch in stderr
169 169 else:
170 170 assert "Setting default branch" not in stderr
171 171
172 172
173 173 def _check_proper_hg_push(stdout, stderr, branch='default'):
174 174 assert 'pushing to' in stdout
175 175 assert 'searching for changes' in stdout
176 176
177 177 assert 'abort:' not in stderr
178 178
179 179
180 180 def _check_proper_clone(stdout, stderr, vcs):
181 181 if vcs == 'hg':
182 182 assert 'requesting all changes' in stdout
183 183 assert 'adding changesets' in stdout
184 184 assert 'adding manifests' in stdout
185 185 assert 'adding file changes' in stdout
186 186
187 187 assert stderr == ''
188 188
189 189 if vcs == 'git':
190 190 assert '' == stdout
191 191 assert 'Cloning into' in stderr
192 192 assert 'abort:' not in stderr
193 193 assert 'fatal:' not in stderr
@@ -1,342 +1,345 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 py.test config for test suite for making push/pull operations.
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30 import os
31 31 import tempfile
32 32 import textwrap
33 33 import pytest
34 import logging
34 35
35 36 from rhodecode import events
36 37 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
37 38 UserToRepoBranchPermission, User
38 39 from rhodecode.model.integration import IntegrationModel
39 40 from rhodecode.model.db import Repository
40 41 from rhodecode.model.meta import Session
41 42 from rhodecode.model.settings import SettingsModel
42 43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
43 44
44 45 from rhodecode.tests import GIT_REPO, HG_REPO
45 46 from rhodecode.tests.fixture import Fixture
46 47 from rhodecode.tests.server_utils import RcWebServer
47 48
48 49 REPO_GROUP = 'a_repo_group'
49 50 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
50 51 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
51 52
53 log = logging.getLogger(__name__)
54
52 55
@pytest.fixture(scope="module")
def rcextensions(request, db_connection, tmpdir_factory):
    """
    Installs a testing rcextensions pack to ensure they work as expected.
    """
    # Note: rcextensions are looked up based on the path of the ini file
    base_tmp = tmpdir_factory.getbasetemp()
    rcextensions_path = base_tmp.join('rcextensions')
    init_path = rcextensions_path.join('__init__.py')

    if rcextensions_path.check():
        pytest.fail(
            "Path for rcextensions already exists, please clean up before "
            "test run this path: %s" % (rcextensions_path, ))
        return

    package_init = textwrap.dedent("""
    # Forward import the example rcextensions to make it
    # active for our tests.
    from rhodecode.tests.other.example_rcextensions import *
    """)

    request.addfinalizer(rcextensions_path.remove)
    init_path.write_binary(package_init, ensure=True)
77 80
78 81
@pytest.fixture(scope="module")
def repos(request, db_connection):
    """Create a copy of each test repo in a repo group."""
    fixture = Fixture()
    repo_group = fixture.create_repo_group(REPO_GROUP)
    repo_group_id = repo_group.group_id

    # fork each base test repo into the group
    for source, grouped_name in ((HG_REPO, HG_REPO_WITH_GROUP),
                                 (GIT_REPO, GIT_REPO_WITH_GROUP)):
        fixture.create_fork(source, source,
                            repo_name_full=grouped_name,
                            repo_group=repo_group_id)

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo(HG_REPO_WITH_GROUP)
        fixture.destroy_repo(GIT_REPO_WITH_GROUP)
        fixture.destroy_repo_group(repo_group_id)
97 100
98 101
@pytest.fixture(scope="module")
def rc_web_server_config_modification():
    """Extra INI overrides for the web server; empty unless overridden."""
    return []
102 105
103 106
@pytest.fixture(scope="module")
def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
    """
    Configuration file used for the fixture `rc_web_server`.
    """

    def factory(rcweb_port, vcsserver_port):
        ini_overrides = [
            {'handler_console': {'level': 'DEBUG'}},
            {'server:main': {'port': rcweb_port}},
            {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}},
        ]
        ini_overrides.extend(rc_web_server_config_modification)
        return testini_factory(ini_overrides)

    return factory
119 122
120 123
@pytest.fixture(scope="module")
def rc_web_server(
        request, vcsserver_factory, available_port_factory,
        rc_web_server_config_factory, repos, rcextensions):
    """
    Run the web server as a subprocess, with its own instance of vcsserver.
    """
    rcweb_port = available_port_factory()
    # lazy %-style logging args instead of eager str.format
    log.info('Using rcweb ops test port %s', rcweb_port)

    vcsserver_port = available_port_factory()
    log.info('Using vcsserver ops test port %s', vcsserver_port)

    vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
    vcsserver_factory(
        request, vcsserver_port=vcsserver_port,
        log_file=vcs_log,
        overrides=(
            {'server:main': {'workers': 2}},
            {'server:main': {'graceful_timeout': 10}},
        ))

    rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
    rc_web_server_config = rc_web_server_config_factory(
        rcweb_port=rcweb_port,
        vcsserver_port=vcsserver_port)
    server = RcWebServer(rc_web_server_config, log_file=rc_log)
    server.start()

    @request.addfinalizer
    def cleanup():
        server.shutdown()

    server.wait_until_ready()
    return server
156 159
157 160
@pytest.fixture()
def disable_locking(baseapp):
    """Unlock both test repos and switch their locking feature off."""
    for repo_name in (GIT_REPO, HG_REPO):
        repo = Repository.get_by_repo_name(repo_name)
        Repository.unlock(repo)
        repo.enable_locking = False
        Session().add(repo)
        Session().commit()
171 174
172 175
@pytest.fixture()
def enable_auth_plugins(request, baseapp, csrf_token):
    """
    Return a factory object that when called, allows to control which
    authentication plugins are enabled.
    """
    def _enable_plugins(plugins_list, override=None):
        override = override or {}
        params = {
            'auth_plugins': ','.join(plugins_list),
        }

        # helper translate some names to others
        name_map = {
            'token': 'authtoken'
        }

        for module in plugins_list:
            plugin_name = module.partition('#')[-1]
            if plugin_name in name_map:
                plugin_name = name_map[plugin_name]
            enabled_plugin = 'auth_%s_enabled' % plugin_name
            cache_ttl = 'auth_%s_cache_ttl' % plugin_name

            # default params that are needed for each plugin,
            # `enabled` and `cache_ttl`
            params.update({
                enabled_plugin: True,
                cache_ttl: 0
            })
            # Bug fix: the original guarded this with `if override.get:`,
            # which tests the bound method object and is always truthy.
            # Apply per-module overrides unconditionally; the `{}` default
            # makes it a no-op when none were supplied.
            params.update(override.get(module, {}))

        for k, v in params.items():
            setting = SettingsModel().create_or_update_setting(k, v)
            Session().add(setting)
        Session().commit()

        SettingsModel().invalidate_settings_cache()

    def cleanup():
        _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])

    request.addfinalizer(cleanup)

    return _enable_plugins
220 223
221 224
@pytest.fixture()
def fs_repo_only(request, rhodecode_fixtures):
    """Factory creating a repo that afterwards exists only on the filesystem."""
    def fs_repo_fabric(repo_name, repo_type):
        # create in DB + FS, then drop the DB record but keep the FS copy
        rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
        rhodecode_fixtures.destroy_repo(repo_name, fs_remove=False)

        def cleanup():
            rhodecode_fixtures.destroy_repo(repo_name, fs_remove=True)
            rhodecode_fixtures.destroy_repo_on_filesystem(repo_name)

        request.addfinalizer(cleanup)

    return fs_repo_fabric
235 238
236 239
@pytest.fixture()
def enable_webhook_push_integration(request):
    """Register a global webhook integration that reacts to push events."""
    integration = Integration()
    integration.integration_type = WebhookIntegrationType.key
    Session().add(integration)

    webhook_settings = dict(
        url='http://httpbin.org/post',
        secret_token='secret',
        username=None,
        password=None,
        custom_header_key=None,
        custom_header_val=None,
        method_type='post',
        events=[events.RepoPushEvent.name],
        log_data=True
    )

    IntegrationModel().update_integration(
        integration,
        name='IntegrationWebhookTest',
        enabled=True,
        settings=webhook_settings,
        repo=None,
        repo_group=None,
        child_repos_only=False,
    )
    Session().commit()
    integration_id = integration.integration_id

    @request.addfinalizer
    def cleanup():
        stored = Integration.get(integration_id)
        Session().delete(stored)
        Session().commit()
272 275
273 276
@pytest.fixture()
def branch_permission_setter(request):
    """
    Factory fixture adding a branch permission rule for a user on a repo.

    Usage::

        def my_test(branch_permission_setter)
            branch_permission_setter(repo_name, username, pattern='*', permission='branch.push')

    The created rule (and any write permission added on the fly) is
    removed again when the test finishes.
    """
    # Bug fix: the original declared `global rule, write_perm` in the inner
    # function, writing to module globals, while the cleanup closure read
    # the enclosing locals that stayed None -- so the finalizer never
    # deleted anything and the rule leaked across tests. A shared mutable
    # dict works here since Python 2 has no `nonlocal`.
    state = {'rule': None, 'write_perm': None}

    def _branch_permissions_setter(
            repo_name, username, pattern='*', permission='branch.push_force'):
        repo = Repository.get_by_repo_name(repo_name)
        repo_id = repo.repo_id

        user = User.get_by_username(username)
        user_id = user.user_id

        rule_perm_obj = Permission.get_by_key(permission)

        # add new entry, based on existing perm entry
        perm = UserRepoToPerm.query() \
            .filter(UserRepoToPerm.repository_id == repo_id) \
            .filter(UserRepoToPerm.user_id == user_id) \
            .first()

        if not perm:
            # such user isn't defined in Permissions for repository
            # we now on-the-fly add new permission
            write_perm = UserRepoToPerm()
            write_perm.permission = Permission.get_by_key('repository.write')
            write_perm.repository_id = repo_id
            write_perm.user_id = user_id
            Session().add(write_perm)
            Session().flush()

            state['write_perm'] = write_perm
            perm = write_perm

        rule = UserToRepoBranchPermission()
        rule.rule_to_perm_id = perm.repo_to_perm_id
        rule.branch_pattern = pattern
        rule.rule_order = 10
        rule.permission = rule_perm_obj
        rule.repository_id = repo_id
        Session().add(rule)
        Session().commit()

        state['rule'] = rule
        return rule

    @request.addfinalizer
    def cleanup():
        if state['rule']:
            Session().delete(state['rule'])
            Session().commit()
        if state['write_perm']:
            Session().delete(state['write_perm'])
            Session().commit()

    return _branch_permissions_setter
341 344
342 345
General Comments 0
You need to be logged in to leave comments. Login now