tests: multiple test case fixes for python3
super-admin
r4994:4e9283a1 default
@@ -1,76 +1,75 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.apps._base import ADMIN_PREFIX
24 24 from rhodecode.model.db import User, UserEmailMap, Repository, UserFollowing
25 25 from rhodecode.tests import (
26 26 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL,
27 27 assert_session_flash)
28 28 from rhodecode.tests.fixture import Fixture
29 29
30 30 fixture = Fixture()
31 31
32 32
33 33 def route_path(name, **kwargs):
34 34 return {
35 35 'my_account_repos':
36 36 ADMIN_PREFIX + '/my_account/repos',
37 37 'my_account_watched':
38 38 ADMIN_PREFIX + '/my_account/watched',
39 39 'my_account_perms':
40 40 ADMIN_PREFIX + '/my_account/perms',
41 41 'my_account_notifications':
42 42 ADMIN_PREFIX + '/my_account/notifications',
43 43 }[name].format(**kwargs)
44 44
45 45
46 46 class TestMyAccountSimpleViews(TestController):
47 47
48 48 def test_my_account_my_repos(self, autologin_user):
49 49 response = self.app.get(route_path('my_account_repos'))
50 50 repos = Repository.query().filter(
51 51 Repository.user == User.get_by_username(
52 52 TEST_USER_ADMIN_LOGIN)).all()
53 53 for repo in repos:
54 response.mustcontain('"name_raw": "%s"' % repo.repo_name)
54 response.mustcontain(f'"name_raw":"{repo.repo_name}"')
55 55
56 56 def test_my_account_my_watched(self, autologin_user):
57 57 response = self.app.get(route_path('my_account_watched'))
58 58
59 59 repos = UserFollowing.query().filter(
60 60 UserFollowing.user == User.get_by_username(
61 61 TEST_USER_ADMIN_LOGIN)).all()
62 62 for repo in repos:
63 response.mustcontain(
64 '"name_raw": "%s"' % repo.follows_repository.repo_name)
63 response.mustcontain(f'"name_raw":"{repo.follows_repository.repo_name}"')
65 64
66 65 def test_my_account_perms(self, autologin_user):
67 66 response = self.app.get(route_path('my_account_perms'))
68 67 assert_response = response.assert_response()
69 68 assert assert_response.get_elements('.perm_tag.none')
70 69 assert assert_response.get_elements('.perm_tag.read')
71 70 assert assert_response.get_elements('.perm_tag.write')
72 71 assert assert_response.get_elements('.perm_tag.admin')
73 72
74 73 def test_my_account_notifications(self, autologin_user):
75 74 response = self.app.get(route_path('my_account_notifications'))
76 75 response.mustcontain('Test flash message')
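
Note: the change in this file swaps percent-formatting for an f-string and drops the space after the colon, presumably matching how the view now serializes the JSON. A minimal sketch of the difference, using a made-up repository name rather than the test fixtures:

    # Hypothetical value standing in for repo.repo_name in the tests above.
    repo_name = 'vcs_test_hg'

    old_style = '"name_raw": "%s"' % repo_name    # percent formatting, space after the colon
    new_style = f'"name_raw":"{repo_name}"'       # f-string, no space

    assert old_style == '"name_raw": "vcs_test_hg"'
    assert new_style == '"name_raw":"vcs_test_hg"'
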
@@ -1,220 +1,220 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22
23 23 import pytest
24 24
25 25 from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
26 26 from rhodecode.tests import TestController
27 27
28 28 MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')
29 29
30 30
31 31 def route_path(name, params=None, **kwargs):
32 32 import urllib.request, urllib.parse, urllib.error
33 33
34 34 base_url = {
35 35 'repo_changelog': '/{repo_name}/changelog',
36 36 'repo_commits': '/{repo_name}/commits',
37 37 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
38 38 'repo_commits_elements': '/{repo_name}/commits_elements',
39 39 }[name].format(**kwargs)
40 40
41 41 if params:
42 42 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
43 43 return base_url
44 44
45 45
46 46 def assert_commits_on_page(response, indexes):
47 47 found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)]
48 48 assert found_indexes == indexes
49 49
50 50
51 51 class TestChangelogController(TestController):
52 52
53 53 def test_commits_page(self, backend):
54 54 self.log_user()
55 55 response = self.app.get(
56 56 route_path('repo_commits', repo_name=backend.repo_name))
57 57
58 58 first_idx = -1
59 59 last_idx = -DEFAULT_CHANGELOG_SIZE
60 60 self.assert_commit_range_on_page(response, first_idx, last_idx, backend)
61 61
62 62 def test_changelog(self, backend):
63 63 self.log_user()
64 64 response = self.app.get(
65 65 route_path('repo_changelog', repo_name=backend.repo_name))
66 66
67 67 first_idx = -1
68 68 last_idx = -DEFAULT_CHANGELOG_SIZE
69 69 self.assert_commit_range_on_page(
70 70 response, first_idx, last_idx, backend)
71 71
72 72 @pytest.mark.backends("hg", "git")
73 73 def test_changelog_filtered_by_branch(self, backend):
74 74 self.log_user()
75 75 self.app.get(
76 76 route_path('repo_changelog', repo_name=backend.repo_name,
77 77 params=dict(branch=backend.default_branch_name)),
78 78 status=200)
79 79
80 80 @pytest.mark.backends("hg", "git")
81 81 def test_commits_filtered_by_branch(self, backend):
82 82 self.log_user()
83 83 self.app.get(
84 84 route_path('repo_commits', repo_name=backend.repo_name,
85 85 params=dict(branch=backend.default_branch_name)),
86 86 status=200)
87 87
88 88 @pytest.mark.backends("svn")
89 89 def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
90 90 repo = backend['svn-simple-layout']
91 91 response = self.app.get(
92 92 route_path('repo_changelog', repo_name=repo.repo_name,
93 93 params=dict(branch='trunk')),
94 94 status=200)
95 95
96 96 assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])
97 97
98 98 def test_commits_filtered_by_wrong_branch(self, backend):
99 99 self.log_user()
100 100 branch = 'wrong-branch-name'
101 101 response = self.app.get(
102 102 route_path('repo_commits', repo_name=backend.repo_name,
103 103 params=dict(branch=branch)),
104 104 status=302)
105 105 expected_url = '/{repo}/commits/{branch}'.format(
106 106 repo=backend.repo_name, branch=branch)
107 107 assert expected_url in response.location
108 108 response = response.follow()
109 109 expected_warning = 'Branch {} is not found.'.format(branch)
110 assert expected_warning in response.body
110 assert expected_warning in response.text
111 111
112 112 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
113 113 def test_changelog_filtered_by_branch_with_merges(
114 114 self, autologin_user, backend):
115 115
116 116 # Note: The changelog of branch "b" does not contain the commit "a1"
117 117 # although this is a parent of commit "b1". And branch "b" has commits
118 118 # which have a smaller index than commit "a1".
119 119 commits = [
120 120 {'message': 'a'},
121 121 {'message': 'b', 'branch': 'b'},
122 122 {'message': 'a1', 'parents': ['a']},
123 123 {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
124 124 ]
125 125 backend.create_repo(commits)
126 126
127 127 self.app.get(
128 128 route_path('repo_changelog', repo_name=backend.repo_name,
129 129 params=dict(branch='b')),
130 130 status=200)
131 131
132 132 @pytest.mark.backends("hg")
133 133 def test_commits_closed_branches(self, autologin_user, backend):
134 134 repo = backend['closed_branch']
135 135 response = self.app.get(
136 136 route_path('repo_commits', repo_name=repo.repo_name,
137 137 params=dict(branch='experimental')),
138 138 status=200)
139 139
140 140 assert_commits_on_page(response, indexes=[3, 1])
141 141
142 142 def test_changelog_pagination(self, backend):
143 143 self.log_user()
144 144 # pagination, walk up to page 6
145 145 changelog_url = route_path(
146 146 'repo_commits', repo_name=backend.repo_name)
147 147
148 148 for page in range(1, 7):
149 149 response = self.app.get(changelog_url, {'page': page})
150 150
151 151 first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
152 152 last_idx = -DEFAULT_CHANGELOG_SIZE * page
153 153 self.assert_commit_range_on_page(response, first_idx, last_idx, backend)
154 154
155 155 def assert_commit_range_on_page(
156 156 self, response, first_idx, last_idx, backend):
157 157 input_template = (
158 158 """<input class="commit-range" """
159 159 """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" """
160 160 """data-short-id="%(short_id)s" id="%(raw_id)s" """
161 161 """name="%(raw_id)s" type="checkbox" value="1" />"""
162 162 )
163 163
164 164 commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
165 165 repo = backend.repo
166 166
167 167 first_commit_on_page = repo.get_commit(commit_idx=first_idx)
168 168 response.mustcontain(
169 169 input_template % {'raw_id': first_commit_on_page.raw_id,
170 170 'idx': first_commit_on_page.idx,
171 171 'short_id': first_commit_on_page.short_id})
172 172
173 173 response.mustcontain(commit_span_template % (
174 174 first_commit_on_page.idx, first_commit_on_page.short_id)
175 175 )
176 176
177 177 last_commit_on_page = repo.get_commit(commit_idx=last_idx)
178 178 response.mustcontain(
179 179 input_template % {'raw_id': last_commit_on_page.raw_id,
180 180 'idx': last_commit_on_page.idx,
181 181 'short_id': last_commit_on_page.short_id})
182 182 response.mustcontain(commit_span_template % (
183 183 last_commit_on_page.idx, last_commit_on_page.short_id)
184 184 )
185 185
186 186 first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
187 187 first_span_of_next_page = commit_span_template % (
188 188 first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
189 189 assert first_span_of_next_page not in response
190 190
191 191 @pytest.mark.parametrize('test_path', [
192 192 'vcs/exceptions.py',
193 193 '/vcs/exceptions.py',
194 194 '//vcs/exceptions.py'
195 195 ])
196 196 def test_commits_with_filenode(self, backend, test_path):
197 197 self.log_user()
198 198 response = self.app.get(
199 199 route_path('repo_commits_file', repo_name=backend.repo_name,
200 200 commit_id='tip', f_path=test_path),
201 201 )
202 202
203 203 # history commits messages
204 204 response.mustcontain('Added exceptions module, this time for real')
205 205 response.mustcontain('Added not implemented hg backend test case')
206 206 response.mustcontain('Added BaseChangeset class')
207 207
208 208 def test_commits_with_filenode_that_is_dirnode(self, backend):
209 209 self.log_user()
210 210 self.app.get(
211 211 route_path('repo_commits_file', repo_name=backend.repo_name,
212 212 commit_id='tip', f_path='/tests'),
213 213 status=302)
214 214
215 215 def test_commits_with_filenode_not_existing(self, backend):
216 216 self.log_user()
217 217 self.app.get(
218 218 route_path('repo_commits_file', repo_name=backend.repo_name,
219 219 commit_id='tip', f_path='wrong_path'),
220 220 status=302)
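
Note: the body/text switch above is the core Python 3 fix in this file. With WebTest responses, response.body is bytes while response.text is the decoded string, and a str-in-bytes membership test raises TypeError on Python 3. A small standalone illustration using plain bytes/str values, not an actual WebTest response:

    # The warning string is illustrative; only the bytes/str behaviour matters here.
    body = b'Branch wrong-branch-name is not found.'
    text = body.decode('utf-8')

    assert 'Branch wrong-branch-name is not found.' in text   # str in str: works

    try:
        _ = 'wrong-branch-name' in body   # str in bytes: TypeError on Python 3
    except TypeError:
        pass
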
@@ -1,327 +1,327 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
24 24 from rhodecode.lib.helpers import _shorten_commit_id
25 25
26 26
27 27 def route_path(name, params=None, **kwargs):
28 28 import urllib.request, urllib.parse, urllib.error
29 29
30 30 base_url = {
31 31 'repo_commit': '/{repo_name}/changeset/{commit_id}',
32 32 'repo_commit_children': '/{repo_name}/changeset_children/{commit_id}',
33 33 'repo_commit_parents': '/{repo_name}/changeset_parents/{commit_id}',
34 34 'repo_commit_raw': '/{repo_name}/changeset-diff/{commit_id}',
35 35 'repo_commit_patch': '/{repo_name}/changeset-patch/{commit_id}',
36 36 'repo_commit_download': '/{repo_name}/changeset-download/{commit_id}',
37 37 'repo_commit_data': '/{repo_name}/changeset-data/{commit_id}',
38 38 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
39 39 }[name].format(**kwargs)
40 40
41 41 if params:
42 42 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
43 43 return base_url
44 44
45 45
46 46 @pytest.mark.usefixtures("app")
47 47 class TestRepoCommitView(object):
48 48
49 49 def test_show_commit(self, backend):
50 50 commit_id = self.commit_id[backend.alias]
51 51 response = self.app.get(route_path(
52 52 'repo_commit', repo_name=backend.repo_name, commit_id=commit_id))
53 53 response.mustcontain('Added a symlink')
54 54 response.mustcontain(commit_id)
55 55 response.mustcontain('No newline at end of file')
56 56
57 57 def test_show_raw(self, backend):
58 58 commit_id = self.commit_id[backend.alias]
59 59 response = self.app.get(route_path(
60 60 'repo_commit_raw',
61 61 repo_name=backend.repo_name, commit_id=commit_id))
62 assert response.body == self.diffs[backend.alias]
62 assert response.text == self.diffs[backend.alias]
63 63
64 64 def test_show_raw_patch(self, backend):
65 65 response = self.app.get(route_path(
66 66 'repo_commit_patch', repo_name=backend.repo_name,
67 67 commit_id=self.commit_id[backend.alias]))
68 assert response.body == self.patches[backend.alias]
68 assert response.text == self.patches[backend.alias]
69 69
70 70 def test_commit_download(self, backend):
71 71 response = self.app.get(route_path(
72 72 'repo_commit_download',
73 73 repo_name=backend.repo_name,
74 74 commit_id=self.commit_id[backend.alias]))
75 assert response.body == self.diffs[backend.alias]
75 assert response.text == self.diffs[backend.alias]
76 76
77 77 def test_single_commit_page_different_ops(self, backend):
78 78 commit_id = {
79 79 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
80 80 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
81 81 'svn': '337',
82 82 }
83 83 diff_stat = {
84 84 'hg': (21, 943, 288),
85 85 'git': (20, 941, 286),
86 86 'svn': (21, 943, 288),
87 87 }
88 88
89 89 commit_id = commit_id[backend.alias]
90 90 response = self.app.get(route_path(
91 91 'repo_commit',
92 92 repo_name=backend.repo_name, commit_id=commit_id))
93 93
94 94 response.mustcontain(_shorten_commit_id(commit_id))
95 95
96 96 compare_page = ComparePage(response)
97 97 file_changes = diff_stat[backend.alias]
98 98 compare_page.contains_change_summary(*file_changes)
99 99
100 100 # files op files
101 101 response.mustcontain('File not present at commit: %s' %
102 102 _shorten_commit_id(commit_id))
103 103
104 104 # svn reports a different file mode in the diff
105 105 if backend.alias == 'svn':
106 106 response.mustcontain('new file 10644')
107 107 else:
108 108 response.mustcontain('new file 100644')
109 109 response.mustcontain('Changed theme to ADC theme') # commit msg
110 110
111 111 self._check_new_diff_menus(response, right_menu=True)
112 112
113 113 def test_commit_range_page_different_ops(self, backend):
114 114 commit_id_range = {
115 115 'hg': (
116 116 '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
117 117 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
118 118 'git': (
119 119 '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
120 120 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
121 121 'svn': (
122 122 '335',
123 123 '337'),
124 124 }
125 125 commit_ids = commit_id_range[backend.alias]
126 126 commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
127 127 response = self.app.get(route_path(
128 128 'repo_commit',
129 129 repo_name=backend.repo_name, commit_id=commit_id))
130 130
131 131 response.mustcontain(_shorten_commit_id(commit_ids[0]))
132 132 response.mustcontain(_shorten_commit_id(commit_ids[1]))
133 133
134 134 compare_page = ComparePage(response)
135 135
136 136 # svn is special
137 137 if backend.alias == 'svn':
138 138 response.mustcontain('new file 10644')
139 139 for file_changes in [(1, 5, 1), (12, 236, 22), (21, 943, 288)]:
140 140 compare_page.contains_change_summary(*file_changes)
141 141 elif backend.alias == 'git':
142 142 response.mustcontain('new file 100644')
143 143 for file_changes in [(12, 222, 20), (20, 941, 286)]:
144 144 compare_page.contains_change_summary(*file_changes)
145 145 else:
146 146 response.mustcontain('new file 100644')
147 147 for file_changes in [(12, 222, 20), (21, 943, 288)]:
148 148 compare_page.contains_change_summary(*file_changes)
149 149
150 150 # files op files
151 151 response.mustcontain('File not present at commit: %s' % _shorten_commit_id(commit_ids[1]))
152 152 response.mustcontain('Added docstrings to vcs.cli') # commit msg
153 153 response.mustcontain('Changed theme to ADC theme') # commit msg
154 154
155 155 self._check_new_diff_menus(response)
156 156
157 157 def test_combined_compare_commit_page_different_ops(self, backend):
158 158 commit_id_range = {
159 159 'hg': (
160 160 '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
161 161 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
162 162 'git': (
163 163 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
164 164 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
165 165 'svn': (
166 166 '335',
167 167 '337'),
168 168 }
169 169 commit_ids = commit_id_range[backend.alias]
170 170 response = self.app.get(route_path(
171 171 'repo_compare',
172 172 repo_name=backend.repo_name,
173 173 source_ref_type='rev', source_ref=commit_ids[0],
174 174 target_ref_type='rev', target_ref=commit_ids[1], ))
175 175
176 176 response.mustcontain(_shorten_commit_id(commit_ids[0]))
177 177 response.mustcontain(_shorten_commit_id(commit_ids[1]))
178 178
179 179 # files op files
180 180 response.mustcontain('File not present at commit: %s' %
181 181 _shorten_commit_id(commit_ids[1]))
182 182
183 183 compare_page = ComparePage(response)
184 184
185 185 # svn is special
186 186 if backend.alias == 'svn':
187 187 response.mustcontain('new file 10644')
188 188 file_changes = (32, 1179, 310)
189 189 compare_page.contains_change_summary(*file_changes)
190 190 elif backend.alias == 'git':
191 191 response.mustcontain('new file 100644')
192 192 file_changes = (31, 1163, 306)
193 193 compare_page.contains_change_summary(*file_changes)
194 194 else:
195 195 response.mustcontain('new file 100644')
196 196 file_changes = (32, 1165, 308)
197 197 compare_page.contains_change_summary(*file_changes)
198 198
199 199 response.mustcontain('Added docstrings to vcs.cli') # commit msg
200 200 response.mustcontain('Changed theme to ADC theme') # commit msg
201 201
202 202 self._check_new_diff_menus(response)
203 203
204 204 def test_changeset_range(self, backend):
205 205 self._check_changeset_range(
206 206 backend, self.commit_id_range, self.commit_id_range_result)
207 207
208 208 def test_changeset_range_with_initial_commit(self, backend):
209 209 commit_id_range = {
210 210 'hg': (
211 211 'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
212 212 '...6cba7170863a2411822803fa77a0a264f1310b35'),
213 213 'git': (
214 214 'c1214f7e79e02fc37156ff215cd71275450cffc3'
215 215 '...fa6600f6848800641328adbf7811fd2372c02ab2'),
216 216 'svn': '1...3',
217 217 }
218 218 commit_id_range_result = {
219 219 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
220 220 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
221 221 'svn': ['1', '2', '3'],
222 222 }
223 223 self._check_changeset_range(
224 224 backend, commit_id_range, commit_id_range_result)
225 225
226 226 def _check_changeset_range(
227 227 self, backend, commit_id_ranges, commit_id_range_result):
228 228 response = self.app.get(
229 229 route_path('repo_commit',
230 230 repo_name=backend.repo_name,
231 231 commit_id=commit_id_ranges[backend.alias]))
232 232
233 233 expected_result = commit_id_range_result[backend.alias]
234 234 response.mustcontain('{} commits'.format(len(expected_result)))
235 235 for commit_id in expected_result:
236 236 response.mustcontain(commit_id)
237 237
238 238 commit_id = {
239 239 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
240 240 'svn': '393',
241 241 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
242 242 }
243 243
244 244 commit_id_range = {
245 245 'hg': (
246 246 'a53d9201d4bc278910d416d94941b7ea007ecd52'
247 247 '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
248 248 'git': (
249 249 '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
250 250 '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
251 251 'svn': '391...393',
252 252 }
253 253
254 254 commit_id_range_result = {
255 255 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
256 256 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
257 257 'svn': ['391', '392', '393'],
258 258 }
259 259
260 260 diffs = {
261 261 'hg': r"""diff --git a/README b/README
262 262 new file mode 120000
263 263 --- /dev/null
264 264 +++ b/README
265 265 @@ -0,0 +1,1 @@
266 266 +README.rst
267 267 \ No newline at end of file
268 268 """,
269 269 'git': r"""diff --git a/README b/README
270 270 new file mode 120000
271 271 index 0000000..92cacd2
272 272 --- /dev/null
273 273 +++ b/README
274 274 @@ -0,0 +1 @@
275 275 +README.rst
276 276 \ No newline at end of file
277 277 """,
278 278 'svn': """Index: README
279 279 ===================================================================
280 280 diff --git a/README b/README
281 281 new file mode 10644
282 282 --- /dev/null\t(revision 0)
283 283 +++ b/README\t(revision 393)
284 284 @@ -0,0 +1 @@
285 285 +link README.rst
286 286 \\ No newline at end of file
287 287 """,
288 288 }
289 289
290 290 patches = {
291 291 'hg': r"""# HG changeset patch
292 292 # User Marcin Kuzminski <marcin@python-works.com>
293 293 # Date 2014-01-07 12:21:40
294 294 # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
295 295 # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e
296 296
297 297 Added a symlink
298 298
299 299 """ + diffs['hg'],
300 300 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
301 301 From: Marcin Kuzminski <marcin@python-works.com>
302 302 Date: 2014-01-07 12:22:20
303 303 Subject: [PATCH] Added a symlink
304 304
305 305 ---
306 306
307 307 """ + diffs['git'],
308 308 'svn': r"""# SVN changeset patch
309 309 # User marcin
310 310 # Date 2014-09-02 12:25:22.071142
311 311 # Revision 393
312 312
313 313 Added a symlink
314 314
315 315 """ + diffs['svn'],
316 316 }
317 317
318 318 def _check_new_diff_menus(self, response, right_menu=False,):
319 319 # individual file diff menus
320 320 for elem in ['Show file before', 'Show file after']:
321 321 response.mustcontain(elem)
322 322
323 323 # right pane diff menus
324 324 if right_menu:
325 325 for elem in ['Hide whitespace changes', 'Toggle wide diff',
326 326 'Show full context diff']:
327 327 response.mustcontain(elem)
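
Note: in this file the fix touches exact-match assertions. On Python 3, comparing bytes to str never raises; it simply evaluates to False, so asserting response.body == expected_str fails even when the content matches. Comparing response.text, a str, restores the intended check. A tiny sketch of the pitfall, with an illustrative diff string:

    expected = 'diff --git a/README b/README\n'
    raw_body = expected.encode('utf-8')           # roughly what response.body yields on Python 3

    assert raw_body != expected                   # bytes == str is always False, no error raised
    assert raw_body.decode('utf-8') == expected   # decoded text compares as intended
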
@@ -1,672 +1,672 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import lxml.html
24 24
25 25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 26 from rhodecode.tests import assert_session_flash
27 27 from rhodecode.tests.utils import AssertResponse, commit_change
28 28
29 29
30 30 def route_path(name, params=None, **kwargs):
31 31 import urllib.request, urllib.parse, urllib.error
32 32
33 33 base_url = {
34 34 'repo_compare_select': '/{repo_name}/compare',
35 35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 36 }[name].format(**kwargs)
37 37
38 38 if params:
39 39 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
40 40 return base_url
41 41
42 42
43 43 @pytest.mark.usefixtures("autologin_user", "app")
44 44 class TestCompareView(object):
45 45
46 46 def test_compare_index_is_reached_at_least_once(self, backend):
47 47 repo = backend.repo
48 48 self.app.get(
49 49 route_path('repo_compare_select', repo_name=repo.repo_name))
50 50
51 51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 53 # Preparing the following repository structure:
54 54 #
55 55 # Origin repository has two commits:
56 56 #
57 57 # 0 1
58 58 # A -- D
59 59 #
60 60 # The fork of it has a few more commits and "D" has a commit index
61 61 # which does not exist in origin.
62 62 #
63 63 # 0 1 2 3 4
64 64 # A -- -- -- D -- E
65 65 # \- B -- C
66 66 #
67 67
68 68 fork = backend.create_repo()
69 69
70 70 # prepare fork
71 71 commit0 = commit_change(
72 72 fork.repo_name, filename='file1', content='A',
73 73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74 74
75 75 commit1 = commit_change(
76 76 fork.repo_name, filename='file1', content='B',
77 77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78 78
79 79 commit_change( # commit 2
80 80 fork.repo_name, filename='file1', content='C',
81 81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82 82
83 83 commit3 = commit_change(
84 84 fork.repo_name, filename='file1', content='D',
85 85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86 86
87 87 commit4 = commit_change(
88 88 fork.repo_name, filename='file1', content='E',
89 89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90 90
91 91 # prepare origin repository, taking just the history up to D
92 92 origin = backend.create_repo()
93 93
94 94 origin_repo = origin.scm_instance(cache=False)
95 95 origin_repo.config.clear_section('hooks')
96 96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98 98
99 99 # Verify test fixture setup
100 100 # This does not work for git
101 101 if backend.alias != 'git':
102 102 assert 5 == len(fork.scm_instance().commit_ids)
103 103 assert 2 == len(origin_repo.commit_ids)
104 104
105 105 # Comparing the revisions
106 106 response = self.app.get(
107 107 route_path('repo_compare',
108 108 repo_name=origin.repo_name,
109 109 source_ref_type="rev", source_ref=commit3.raw_id,
110 110 target_ref_type="rev", target_ref=commit4.raw_id,
111 111 params=dict(merge='1', target_repo=fork.repo_name)
112 112 ))
113 113
114 114 compare_page = ComparePage(response)
115 115 compare_page.contains_commits([commit4])
116 116
117 117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 119 repo1 = backend.create_repo()
120 120
121 121 # commit something !
122 122 commit0 = commit_change(
123 123 repo1.repo_name, filename='file1', content='line1\n',
124 124 message='commit1', vcs_type=backend.alias, parent=None,
125 125 newfile=True)
126 126
127 127 # fork this repo
128 128 repo2 = backend.create_fork()
129 129
130 130 # add two extra commits into the fork
131 131 commit1 = commit_change(
132 132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 133 message='commit2', vcs_type=backend.alias, parent=commit0)
134 134
135 135 commit2 = commit_change(
136 136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 137 message='commit3', vcs_type=backend.alias, parent=commit1)
138 138
139 139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141 141
142 142 response = self.app.get(
143 143 route_path('repo_compare',
144 144 repo_name=repo1.repo_name,
145 145 source_ref_type="branch", source_ref=commit_id2,
146 146 target_ref_type="branch", target_ref=commit_id1,
147 147 params=dict(merge='1', target_repo=repo2.repo_name)
148 148 ))
149 149
150 150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152 152
153 153 compare_page = ComparePage(response)
154 154 compare_page.contains_change_summary(1, 2, 0)
155 155 compare_page.contains_commits([commit1, commit2])
156 156
157 157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159 159
160 160 # Swap is removed when comparing branches since it's a PR feature and
161 161 # it is then a preview mode
162 162 compare_page.swap_is_hidden()
163 163 compare_page.target_source_are_disabled()
164 164
165 165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 167 repo1 = backend.create_repo()
168 168
169 169 # commit something !
170 170 commit0 = commit_change(
171 171 repo1.repo_name, filename='file1', content='line1\n',
172 172 message='commit1', vcs_type=backend.alias, parent=None,
173 173 newfile=True)
174 174
175 175 # fork this repo
176 176 repo2 = backend.create_fork()
177 177
178 178 # now commit something to origin repo
179 179 commit_change(
180 180 repo1.repo_name, filename='file2', content='line1file2\n',
181 181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 182 newfile=True)
183 183
184 184 # add two extra commits into the fork
185 185 commit1 = commit_change(
186 186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 187 message='commit2', vcs_type=backend.alias, parent=commit0)
188 188
189 189 commit2 = commit_change(
190 190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 191 message='commit3', vcs_type=backend.alias, parent=commit1)
192 192
193 193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195 195
196 196 response = self.app.get(
197 197 route_path('repo_compare',
198 198 repo_name=repo1.repo_name,
199 199 source_ref_type="branch", source_ref=commit_id2,
200 200 target_ref_type="branch", target_ref=commit_id1,
201 201 params=dict(merge='1', target_repo=repo2.repo_name),
202 202 ))
203 203
204 204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206 206
207 207 compare_page = ComparePage(response)
208 208 compare_page.contains_change_summary(1, 2, 0)
209 209 compare_page.contains_commits([commit1, commit2])
210 210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212 212
213 213 # Swap is removed when comparing branches since it's a PR feature and
214 214 # it is then a preview mode
215 215 compare_page.swap_is_hidden()
216 216 compare_page.target_source_are_disabled()
217 217
218 218 @pytest.mark.xfail_backends("svn")
219 219 # TODO(marcink): no svn support for comparing two separate repos
220 220 def test_compare_of_unrelated_forks(self, backend):
221 221 orig = backend.create_repo(number_of_commits=1)
222 222 fork = backend.create_repo(number_of_commits=1)
223 223
224 224 response = self.app.get(
225 225 route_path('repo_compare',
226 226 repo_name=orig.repo_name,
227 227 source_ref_type="rev", source_ref="tip",
228 228 target_ref_type="rev", target_ref="tip",
229 229 params=dict(merge='1', target_repo=fork.repo_name),
230 230 ),
231 231 status=302)
232 232 response = response.follow()
233 233 response.mustcontain("Repositories unrelated.")
234 234
235 235 @pytest.mark.xfail_backends("svn")
236 236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237 237
238 238 # repo1:
239 239 # commit0:
240 240 # commit1:
241 241 # repo1-fork- in which we will cherry pick bottom commits
242 242 # commit0:
243 243 # commit1:
244 244 # commit2: x
245 245 # commit3: x
246 246 # commit4: x
247 247 # commit5:
248 248 # make repo1, and commit1+commit2
249 249
250 250 repo1 = backend.create_repo()
251 251
252 252 # commit something !
253 253 commit0 = commit_change(
254 254 repo1.repo_name, filename='file1', content='line1\n',
255 255 message='commit1', vcs_type=backend.alias, parent=None,
256 256 newfile=True)
257 257 commit1 = commit_change(
258 258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 259 message='commit2', vcs_type=backend.alias, parent=commit0)
260 260
261 261 # fork this repo
262 262 repo2 = backend.create_fork()
263 263
264 264 # now make commit3-6
265 265 commit2 = commit_change(
266 266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 268 commit3 = commit_change(
269 269 repo1.repo_name, filename='file1',
270 270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 271 vcs_type=backend.alias, parent=commit2)
272 272 commit4 = commit_change(
273 273 repo1.repo_name, filename='file1',
274 274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 275 vcs_type=backend.alias, parent=commit3)
276 276 commit_change( # commit 5
277 277 repo1.repo_name, filename='file1',
278 278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 279 message='commit6', vcs_type=backend.alias, parent=commit4)
280 280
281 281 response = self.app.get(
282 282 route_path('repo_compare',
283 283 repo_name=repo2.repo_name,
284 284 # parent of commit2, in target repo2
285 285 source_ref_type="rev", source_ref=commit1.raw_id,
286 286 target_ref_type="rev", target_ref=commit4.raw_id,
287 287 params=dict(merge='1', target_repo=repo1.repo_name),
288 288 ))
289 289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291 291
292 292 # files
293 293 compare_page = ComparePage(response)
294 294 compare_page.contains_change_summary(1, 3, 0)
295 295 compare_page.contains_commits([commit2, commit3, commit4])
296 296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298 298
299 299 @pytest.mark.xfail_backends("svn")
300 300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 301 # repo1:
302 302 # commit0:
303 303 # commit1:
305 305 # repo1-fork - in which we will cherry pick top commits
305 305 # commit0:
306 306 # commit1:
307 307 # commit2:
308 308 # commit3: x
309 309 # commit4: x
310 310 # commit5: x
311 311
312 312 # make repo1, and commit1+commit2
313 313 repo1 = backend.create_repo()
314 314
315 315 # commit something !
316 316 commit0 = commit_change(
317 317 repo1.repo_name, filename='file1', content='line1\n',
318 318 message='commit1', vcs_type=backend.alias, parent=None,
319 319 newfile=True)
320 320 commit1 = commit_change(
321 321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 322 message='commit2', vcs_type=backend.alias, parent=commit0)
323 323
324 324 # fork this repo
325 325 backend.create_fork()
326 326
327 327 # now make commit3-6
328 328 commit2 = commit_change(
329 329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 331 commit3 = commit_change(
332 332 repo1.repo_name, filename='file1',
333 333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 334 vcs_type=backend.alias, parent=commit2)
335 335 commit4 = commit_change(
336 336 repo1.repo_name, filename='file1',
337 337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 338 vcs_type=backend.alias, parent=commit3)
339 339 commit5 = commit_change(
340 340 repo1.repo_name, filename='file1',
341 341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 342 message='commit6', vcs_type=backend.alias, parent=commit4)
343 343
344 344 response = self.app.get(
345 345 route_path('repo_compare',
346 346 repo_name=repo1.repo_name,
347 347 # parent of commit3, not in source repo2
348 348 source_ref_type="rev", source_ref=commit2.raw_id,
349 349 target_ref_type="rev", target_ref=commit5.raw_id,
350 350 params=dict(merge='1'),))
351 351
352 352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354 354
355 355 compare_page = ComparePage(response)
356 356 compare_page.contains_change_summary(1, 3, 0)
357 357 compare_page.contains_commits([commit3, commit4, commit5])
358 358
359 359 # files
360 360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362 362
363 363 @pytest.mark.xfail_backends("svn")
364 364 def test_compare_remote_branches(self, backend):
365 365 repo1 = backend.repo
366 366 repo2 = backend.create_fork()
367 367
368 368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372 372
373 373 response = self.app.get(
374 374 route_path('repo_compare',
375 375 repo_name=repo1.repo_name,
376 376 source_ref_type="rev", source_ref=commit_id1,
377 377 target_ref_type="rev", target_ref=commit_id2,
378 378 params=dict(merge='1', target_repo=repo2.repo_name),
379 379 ))
380 380
381 381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383 383
384 384 compare_page = ComparePage(response)
385 385
386 386 # outgoing commits between those commits
387 387 compare_page.contains_commits(
388 388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389 389
390 390 # files
391 391 compare_page.contains_file_links_and_anchors([
392 392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 395 ])
396 396
397 397 @pytest.mark.xfail_backends("svn")
398 398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 399 repo1 = backend.create_repo()
400 400 r1_name = repo1.repo_name
401 401
402 402 commit0 = commit_change(
403 403 repo=r1_name, filename='file1',
404 404 content='line1', message='commit1', vcs_type=backend.alias,
405 405 newfile=True)
406 406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407 407
408 408 # fork the repo1
409 409 repo2 = backend.create_fork()
410 410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411 411
412 412 self.r2_id = repo2.repo_id
413 413 r2_name = repo2.repo_name
414 414
415 415 commit1 = commit_change(
416 416 repo=r2_name, filename='file1-fork',
417 417 content='file1-line1-from-fork', message='commit1-fork',
418 418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 419 newfile=True)
420 420
421 421 commit2 = commit_change(
422 422 repo=r2_name, filename='file2-fork',
423 423 content='file2-line1-from-fork', message='commit2-fork',
424 424 vcs_type=backend.alias, parent=commit1,
425 425 newfile=True)
426 426
427 427 commit_change( # commit 3
428 428 repo=r2_name, filename='file3-fork',
429 429 content='file3-line1-from-fork', message='commit3-fork',
430 430 vcs_type=backend.alias, parent=commit2, newfile=True)
431 431
432 432 # compare !
433 433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435 435
436 436 response = self.app.get(
437 437 route_path('repo_compare',
438 438 repo_name=r2_name,
439 439 source_ref_type="branch", source_ref=commit_id1,
440 440 target_ref_type="branch", target_ref=commit_id2,
441 441 params=dict(merge='1', target_repo=r1_name),
442 442 ))
443 443
444 444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 446 response.mustcontain('No files')
447 447 response.mustcontain('No commits in this compare')
448 448
449 449 commit0 = commit_change(
450 450 repo=r1_name, filename='file2',
451 451 content='line1-added-after-fork', message='commit2-parent',
452 452 vcs_type=backend.alias, parent=None, newfile=True)
453 453
454 454 # compare !
455 455 response = self.app.get(
456 456 route_path('repo_compare',
457 457 repo_name=r2_name,
458 458 source_ref_type="branch", source_ref=commit_id1,
459 459 target_ref_type="branch", target_ref=commit_id2,
460 460 params=dict(merge='1', target_repo=r1_name),
461 461 ))
462 462
463 463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465 465
466 466 response.mustcontain("""commit2-parent""")
467 467 response.mustcontain("""line1-added-after-fork""")
468 468 compare_page = ComparePage(response)
469 469 compare_page.contains_change_summary(1, 1, 0)
470 470
471 471 @pytest.mark.xfail_backends("svn")
472 472 def test_compare_commits(self, backend, xhr_header):
473 473 commit0 = backend.repo.get_commit(commit_idx=0)
474 474 commit1 = backend.repo.get_commit(commit_idx=1)
475 475
476 476 response = self.app.get(
477 477 route_path('repo_compare',
478 478 repo_name=backend.repo_name,
479 479 source_ref_type="rev", source_ref=commit0.raw_id,
480 480 target_ref_type="rev", target_ref=commit1.raw_id,
481 481 params=dict(merge='1')
482 482 ),
483 483 extra_environ=xhr_header, )
484 484
485 485 # outgoing commits between those commits
486 486 compare_page = ComparePage(response)
487 487 compare_page.contains_commits(commits=[commit1])
488 488
489 489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 490 repo = backend.repo
491 491 badrepo = 'badrepo'
492 492
493 493 response = self.app.get(
494 494 route_path('repo_compare',
495 495 repo_name=badrepo,
496 496 source_ref_type="rev", source_ref='tip',
497 497 target_ref_type="rev", target_ref='tip',
498 498 params=dict(merge='1', target_repo=repo.repo_name)
499 499 ),
500 500 status=404)
501 501
502 502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 503 repo = backend.repo
504 504 badrepo = 'badrepo'
505 505
506 506 response = self.app.get(
507 507 route_path('repo_compare',
508 508 repo_name=repo.repo_name,
509 509 source_ref_type="rev", source_ref='tip',
510 510 target_ref_type="rev", target_ref='tip',
511 511 params=dict(merge='1', target_repo=badrepo),
512 512 ),
513 513 status=302)
514 514 redirected = response.follow()
515 515 redirected.mustcontain(
516 516 'Could not find the target repo: `{}`'.format(badrepo))
517 517
518 518 def test_compare_not_in_preview_mode(self, backend_stub):
519 519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521 521
522 522 response = self.app.get(
523 523 route_path('repo_compare',
524 524 repo_name=backend_stub.repo_name,
525 525 source_ref_type="rev", source_ref=commit0.raw_id,
526 526 target_ref_type="rev", target_ref=commit1.raw_id,
527 527 ))
528 528
529 529 # outgoing commits between those commits
530 530 compare_page = ComparePage(response)
531 531 compare_page.swap_is_visible()
532 532 compare_page.target_source_are_enabled()
533 533
534 534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 535 orig = backend_hg.create_repo(number_of_commits=1)
536 536 fork = backend_hg.create_fork()
537 537
538 538 settings_util.create_repo_rhodecode_ui(
539 539 orig, 'extensions', value='', key='largefiles', active=False)
540 540 settings_util.create_repo_rhodecode_ui(
541 541 fork, 'extensions', value='', key='largefiles', active=True)
542 542
543 543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 544 'MercurialRepository.compare')
545 545 with mock.patch(compare_module) as compare_mock:
546 546 compare_mock.side_effect = RepositoryRequirementError()
547 547
548 548 response = self.app.get(
549 549 route_path('repo_compare',
550 550 repo_name=orig.repo_name,
551 551 source_ref_type="rev", source_ref="tip",
552 552 target_ref_type="rev", target_ref="tip",
553 553 params=dict(merge='1', target_repo=fork.repo_name),
554 554 ),
555 555 status=302)
556 556
557 557 assert_session_flash(
558 558 response,
559 559 'Could not compare repos with different large file settings')
560 560
561 561
562 562 @pytest.mark.usefixtures("autologin_user")
563 563 class TestCompareControllerSvn(object):
564 564
565 565 def test_supports_references_with_path(self, app, backend_svn):
566 566 repo = backend_svn['svn-simple-layout']
567 567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 568 response = app.get(
569 569 route_path('repo_compare',
570 570 repo_name=repo.repo_name,
571 571 source_ref_type="tag",
572 572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 573 target_ref_type="tag",
574 574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 575 params=dict(merge='1'),
576 576 ),
577 577 status=200)
578 578
579 579 # Expecting no commits, since both paths are at the same revision
580 580 response.mustcontain('No commits in this compare')
581 581
582 582 # Should find only one file changed when comparing those two tags
583 583 response.mustcontain('example.py')
584 584 compare_page = ComparePage(response)
585 585 compare_page.contains_change_summary(1, 5, 1)
586 586
587 587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 588 repo = backend_svn['svn-simple-layout']
589 589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 591 response = app.get(
592 592 route_path('repo_compare',
593 593 repo_name=repo.repo_name,
594 594 source_ref_type="tag",
595 595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 596 target_ref_type="tag",
597 597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 598 params=dict(merge='1')
599 599 ),
600 600 status=200)
601 601
602 602 # It should show commits
603 assert 'No commits in this compare' not in response.body
603 assert 'No commits in this compare' not in response.text
604 604
605 605 # Should find only one file changed when comparing those two tags
606 606 response.mustcontain('example.py')
607 607 compare_page = ComparePage(response)
608 608 compare_page.contains_change_summary(1, 5, 1)
609 609
610 610
611 611 class ComparePage(AssertResponse):
612 612 """
613 613 Abstracts the page template from the tests
614 614 """
615 615
616 616 def contains_file_links_and_anchors(self, files):
617 617 doc = lxml.html.fromstring(self.response.body)
618 618 for filename, file_id in files:
619 619 self.contains_one_anchor(file_id)
620 620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 621 assert len(diffblock) == 2
622 622 for lnk in diffblock[0].cssselect('a'):
623 623 if 'permalink' in lnk.text:
624 624 assert '#{}'.format(file_id) in lnk.attrib['href']
625 625 break
626 626 else:
627 627 pytest.fail('Unable to find permalink')
628 628
629 629 def contains_change_summary(self, files_changed, inserted, deleted):
630 630 template = (
631 631 '{files_changed} file{plural} changed: '
632 632 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
633 633 self.response.mustcontain(template.format(
634 634 files_changed=files_changed,
635 635 plural="s" if files_changed > 1 else "",
636 636 inserted=inserted,
637 637 deleted=deleted))
638 638
639 639 def contains_commits(self, commits, ancestors=None):
640 640 response = self.response
641 641
642 642 for commit in commits:
643 643 # Expecting to see the commit message in an element which
644 644 # has the ID "c-{commit.raw_id}"
645 645 self.element_contains('#c-' + commit.raw_id, commit.message)
646 646 self.contains_one_link(
647 647 'r%s:%s' % (commit.idx, commit.short_id),
648 648 self._commit_url(commit))
649 649
650 650 if ancestors:
651 651 response.mustcontain('Ancestor')
652 652 for ancestor in ancestors:
653 653 self.contains_one_link(
654 654 ancestor.short_id, self._commit_url(ancestor))
655 655
656 656 def _commit_url(self, commit):
657 657 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
658 658
659 659 def swap_is_hidden(self):
660 660 assert '<a id="btn-swap"' not in self.response.text
661 661
662 662 def swap_is_visible(self):
663 663 assert '<a id="btn-swap"' in self.response.text
664 664
665 665 def target_source_are_disabled(self):
666 666 response = self.response
667 667 response.mustcontain("var enable_fields = false;")
668 668 response.mustcontain('.select2("enable", enable_fields)')
669 669
670 670 def target_source_are_enabled(self):
671 671 response = self.response
672 672 response.mustcontain("var enable_fields = true;")
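
Note: the route_path helpers in these test modules also reflect the Python 3 urllib reorganisation; urlencode now lives in urllib.parse instead of the old top-level urllib module. A cut-down copy of the helper, reduced to a single route for illustration:

    import urllib.parse

    def route_path(name, params=None, **kwargs):
        # Same shape as the helpers above, with only one route kept.
        base_url = {
            'repo_compare_select': '/{repo_name}/compare',
        }[name].format(**kwargs)
        if params:
            base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
        return base_url

    assert route_path('repo_compare_select', repo_name='repo1',
                      params={'merge': '1'}) == '/repo1/compare?merge=1'
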
@@ -1,1092 +1,1092 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 28 from rhodecode.lib import helpers as h
29 29 from collections import OrderedDict
30 30 from rhodecode.lib.ext_json import json
31 31 from rhodecode.lib.vcs import nodes
32 32
33 33 from rhodecode.lib.vcs.conf import settings
34 34 from rhodecode.tests import assert_session_flash
35 35 from rhodecode.tests.fixture import Fixture
36 36 from rhodecode.model.db import Session
37 37
38 38 fixture = Fixture()
39 39
40 40
41 41 def get_node_history(backend_type):
42 42 return {
43 43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 46 }[backend_type]
47 47
48 48
49 49 def route_path(name, params=None, **kwargs):
50 50 import urllib.request, urllib.parse, urllib.error
51 51
52 52 base_url = {
53 53 'repo_summary': '/{repo_name}',
54 54 'repo_archivefile': '/{repo_name}/archive/{fname}',
55 55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
56 56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
57 57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
58 58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
59 59 'repo_files:default_commit': '/{repo_name}/files',
60 60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
61 61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
62 62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
63 63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
64 64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
65 65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
66 66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
67 67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
68 68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
69 69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
70 70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
71 71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
72 72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
73 73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
74 74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
75 75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
76 76 }[name].format(**kwargs)
77 77
78 78 if params:
79 79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
80 80 return base_url
81 81
82 82
83 83 def assert_files_in_response(response, files, params):
84 84 template = (
85 85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
86 86 _assert_items_in_response(response, files, template, params)
87 87
88 88
89 89 def assert_dirs_in_response(response, dirs, params):
90 90 template = (
91 91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
92 92 _assert_items_in_response(response, dirs, template, params)
93 93
94 94
95 95 def _assert_items_in_response(response, items, template, params):
96 96 for item in items:
97 97 item_params = {'name': item}
98 98 item_params.update(params)
99 99 response.mustcontain(template % item_params)
100 100
101 101
102 102 def assert_timeago_in_response(response, items, params):
103 103 for item in items:
104 104 response.mustcontain(h.age_component(params['date']))
105 105
106 106
107 107 @pytest.mark.usefixtures("app")
108 108 class TestFilesViews(object):
109 109
110 110 def test_show_files(self, backend):
111 111 response = self.app.get(
112 112 route_path('repo_files',
113 113 repo_name=backend.repo_name,
114 114 commit_id='tip', f_path='/'))
115 115 commit = backend.repo.get_commit()
116 116
117 117 params = {
118 118 'repo_name': backend.repo_name,
119 119 'commit_id': commit.raw_id,
120 120 'date': commit.date
121 121 }
122 122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
123 123 files = [
124 124 '.gitignore',
125 125 '.hgignore',
126 126 '.hgtags',
127 127 # TODO: missing in Git
128 128 # '.travis.yml',
129 129 'MANIFEST.in',
130 130 'README.rst',
131 131 # TODO: File is missing in svn repository
132 132 # 'run_test_and_report.sh',
133 133 'setup.cfg',
134 134 'setup.py',
135 135 'test_and_report.sh',
136 136 'tox.ini',
137 137 ]
138 138 assert_files_in_response(response, files, params)
139 139 assert_timeago_in_response(response, files, params)
140 140
141 141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
142 142 repo = backend_hg['subrepos']
143 143 response = self.app.get(
144 144 route_path('repo_files',
145 145 repo_name=repo.repo_name,
146 146 commit_id='tip', f_path='/'))
147 147 assert_response = response.assert_response()
148 148 assert_response.contains_one_link(
149 149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
150 150
151 151 def test_show_files_links_submodules_with_absolute_url_subpaths(
152 152 self, backend_hg):
153 153 repo = backend_hg['subrepos']
154 154 response = self.app.get(
155 155 route_path('repo_files',
156 156 repo_name=repo.repo_name,
157 157 commit_id='tip', f_path='/'))
158 158 assert_response = response.assert_response()
159 159 assert_response.contains_one_link(
160 160 'subpaths-path @ 000000000000',
161 161 'http://sub-base.example.com/subpaths-path')
162 162
163 163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 164 def test_files_menu(self, backend):
165 165 new_branch = "temp_branch_name"
166 166 commits = [
167 167 {'message': 'a'},
168 168 {'message': 'b', 'branch': new_branch}
169 169 ]
170 170 backend.create_repo(commits)
171 171 backend.repo.landing_rev = "branch:%s" % new_branch
172 172 Session().commit()
173 173
174 174 # get the response based on tip, not the new commit
175 175 response = self.app.get(
176 176 route_path('repo_files',
177 177 repo_name=backend.repo_name,
178 178 commit_id='tip', f_path='/'))
179 179
180 180 # make sure the Files menu URL points to the new commit, not to tip
181 181 landing_rev = backend.repo.landing_ref_name
182 182 files_url = route_path('repo_files:default_path',
183 183 repo_name=backend.repo_name,
184 184 commit_id=landing_rev, params={'at': landing_rev})
185 185
186 186 assert landing_rev != 'tip'
187 187 response.mustcontain(
188 188 '<li class="active"><a class="menulink" href="%s">' % files_url)
189 189
190 190 def test_show_files_commit(self, backend):
191 191 commit = backend.repo.get_commit(commit_idx=32)
192 192
193 193 response = self.app.get(
194 194 route_path('repo_files',
195 195 repo_name=backend.repo_name,
196 196 commit_id=commit.raw_id, f_path='/'))
197 197
198 198 dirs = ['docs', 'tests']
199 199 files = ['README.rst']
200 200 params = {
201 201 'repo_name': backend.repo_name,
202 202 'commit_id': commit.raw_id,
203 203 }
204 204 assert_dirs_in_response(response, dirs, params)
205 205 assert_files_in_response(response, files, params)
206 206
207 207 def test_show_files_different_branch(self, backend):
208 208 branches = dict(
209 209 hg=(150, ['git']),
210 210 # TODO: Git test repository does not contain other branches
211 211 git=(633, ['master']),
212 212 # TODO: Branch support in Subversion
213 213 svn=(150, [])
214 214 )
215 215 idx, branches = branches[backend.alias]
216 216 commit = backend.repo.get_commit(commit_idx=idx)
217 217 response = self.app.get(
218 218 route_path('repo_files',
219 219 repo_name=backend.repo_name,
220 220 commit_id=commit.raw_id, f_path='/'))
221 221
222 222 assert_response = response.assert_response()
223 223 for branch in branches:
224 224 assert_response.element_contains('.tags .branchtag', branch)
225 225
226 226 def test_show_files_paging(self, backend):
227 227 repo = backend.repo
228 228 indexes = [73, 92, 109, 1, 0]
229 229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
230 230 for rev in indexes]
231 231
232 232 for idx in idx_map:
233 233 response = self.app.get(
234 234 route_path('repo_files',
235 235 repo_name=backend.repo_name,
236 236 commit_id=idx[1], f_path='/'))
237 237
238 238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
239 239
240 240 def test_file_source(self, backend):
241 241 commit = backend.repo.get_commit(commit_idx=167)
242 242 response = self.app.get(
243 243 route_path('repo_files',
244 244 repo_name=backend.repo_name,
245 245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
246 246
247 247 msgbox = """<div class="commit">%s</div>"""
248 248 response.mustcontain(msgbox % (commit.message, ))
249 249
250 250 assert_response = response.assert_response()
251 251 if commit.branch:
252 252 assert_response.element_contains(
253 253 '.tags.tags-main .branchtag', commit.branch)
254 254 if commit.tags:
255 255 for tag in commit.tags:
256 256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
257 257
258 258 def test_file_source_annotated(self, backend):
259 259 response = self.app.get(
260 260 route_path('repo_files:annotated',
261 261 repo_name=backend.repo_name,
262 262 commit_id='tip', f_path='vcs/nodes.py'))
263 263 expected_commits = {
264 264 'hg': 'r356',
265 265 'git': 'r345',
266 266 'svn': 'r208',
267 267 }
268 268 response.mustcontain(expected_commits[backend.alias])
269 269
270 270 def test_file_source_authors(self, backend):
271 271 response = self.app.get(
272 272 route_path('repo_file_authors',
273 273 repo_name=backend.repo_name,
274 274 commit_id='tip', f_path='vcs/nodes.py'))
275 275 expected_authors = {
276 276 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
277 277 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
278 278 'svn': ('marcin', 'lukasz'),
279 279 }
280 280
281 281 for author in expected_authors[backend.alias]:
282 282 response.mustcontain(author)
283 283
284 284 def test_file_source_authors_with_annotation(self, backend):
285 285 response = self.app.get(
286 286 route_path('repo_file_authors',
287 287 repo_name=backend.repo_name,
288 288 commit_id='tip', f_path='vcs/nodes.py',
289 289 params=dict(annotate=1)))
290 290 expected_authors = {
291 291 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
292 292 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
293 293 'svn': ('marcin', 'lukasz'),
294 294 }
295 295
296 296 for author in expected_authors[backend.alias]:
297 297 response.mustcontain(author)
298 298
299 299 def test_file_source_history(self, backend, xhr_header):
300 300 response = self.app.get(
301 301 route_path('repo_file_history',
302 302 repo_name=backend.repo_name,
303 303 commit_id='tip', f_path='vcs/nodes.py'),
304 304 extra_environ=xhr_header)
305 305 assert get_node_history(backend.alias) == json.loads(response.body)
306 306
307 307 def test_file_source_history_svn(self, backend_svn, xhr_header):
308 308 simple_repo = backend_svn['svn-simple-layout']
309 309 response = self.app.get(
310 310 route_path('repo_file_history',
311 311 repo_name=simple_repo.repo_name,
312 312 commit_id='tip', f_path='trunk/example.py'),
313 313 extra_environ=xhr_header)
314 314
315 315 expected_data = json.loads(
316 316 fixture.load_resource('svn_node_history_branches.json'))
317 317
318 318 assert expected_data == response.json
319 319
320 320 def test_file_source_history_with_annotation(self, backend, xhr_header):
321 321 response = self.app.get(
322 322 route_path('repo_file_history',
323 323 repo_name=backend.repo_name,
324 324 commit_id='tip', f_path='vcs/nodes.py',
325 325 params=dict(annotate=1)),
326 326
327 327 extra_environ=xhr_header)
328 328 assert get_node_history(backend.alias) == json.loads(response.body)
329 329
330 330 def test_tree_search_top_level(self, backend, xhr_header):
331 331 commit = backend.repo.get_commit(commit_idx=173)
332 332 response = self.app.get(
333 333 route_path('repo_files_nodelist',
334 334 repo_name=backend.repo_name,
335 335 commit_id=commit.raw_id, f_path='/'),
336 336 extra_environ=xhr_header)
337 337 assert 'nodes' in response.json
338 338 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
339 339
340 340 def test_tree_search_missing_xhr(self, backend):
341 341 self.app.get(
342 342 route_path('repo_files_nodelist',
343 343 repo_name=backend.repo_name,
344 344 commit_id='tip', f_path='/'),
345 345 status=404)
346 346
347 347 def test_tree_search_at_path(self, backend, xhr_header):
348 348 commit = backend.repo.get_commit(commit_idx=173)
349 349 response = self.app.get(
350 350 route_path('repo_files_nodelist',
351 351 repo_name=backend.repo_name,
352 352 commit_id=commit.raw_id, f_path='/docs'),
353 353 extra_environ=xhr_header)
354 354 assert 'nodes' in response.json
355 355 nodes = response.json['nodes']
356 356 assert {'name': 'docs/api', 'type': 'dir'} in nodes
357 357 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
358 358
359 359 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
360 360 commit = backend.repo.get_commit(commit_idx=173)
361 361 response = self.app.get(
362 362 route_path('repo_files_nodelist',
363 363 repo_name=backend.repo_name,
364 364 commit_id=commit.raw_id, f_path='/docs/api'),
365 365 extra_environ=xhr_header)
366 366 assert 'nodes' in response.json
367 367 nodes = response.json['nodes']
368 368 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
369 369
370 370 def test_tree_search_at_path_missing_xhr(self, backend):
371 371 self.app.get(
372 372 route_path('repo_files_nodelist',
373 373 repo_name=backend.repo_name,
374 374 commit_id='tip', f_path='/docs'),
375 375 status=404)
376 376
377 377 def test_nodetree(self, backend, xhr_header):
378 378 commit = backend.repo.get_commit(commit_idx=173)
379 379 response = self.app.get(
380 380 route_path('repo_nodetree_full',
381 381 repo_name=backend.repo_name,
382 382 commit_id=commit.raw_id, f_path='/'),
383 383 extra_environ=xhr_header)
384 384
385 385 assert_response = response.assert_response()
386 386
387 387 for attr in ['data-commit-id', 'data-date', 'data-author']:
388 388 elements = assert_response.get_elements('[{}]'.format(attr))
389 389 assert len(elements) > 1
390 390
391 391 for element in elements:
392 392 assert element.get(attr)
393 393
394 394 def test_nodetree_if_file(self, backend, xhr_header):
395 395 commit = backend.repo.get_commit(commit_idx=173)
396 396 response = self.app.get(
397 397 route_path('repo_nodetree_full',
398 398 repo_name=backend.repo_name,
399 399 commit_id=commit.raw_id, f_path='README.rst'),
400 400 extra_environ=xhr_header)
401 assert response.body == ''
401 assert response.text == ''
402 402
403 403 def test_nodetree_wrong_path(self, backend, xhr_header):
404 404 commit = backend.repo.get_commit(commit_idx=173)
405 405 response = self.app.get(
406 406 route_path('repo_nodetree_full',
407 407 repo_name=backend.repo_name,
408 408 commit_id=commit.raw_id, f_path='/dont-exist'),
409 409 extra_environ=xhr_header)
410 410
411 411 err = 'error: There is no file nor ' \
412 412 'directory at the given path'
413 assert err in response.body
413 assert err in response.text
414 414
415 415 def test_nodetree_missing_xhr(self, backend):
416 416 self.app.get(
417 417 route_path('repo_nodetree_full',
418 418 repo_name=backend.repo_name,
419 419 commit_id='tip', f_path='/'),
420 420 status=404)
421 421
422 422
423 423 @pytest.mark.usefixtures("app", "autologin_user")
424 424 class TestRawFileHandling(object):
425 425
426 426 def test_download_file(self, backend):
427 427 commit = backend.repo.get_commit(commit_idx=173)
428 428 response = self.app.get(
429 429 route_path('repo_file_download',
430 430 repo_name=backend.repo_name,
431 431 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
432 432
433 433 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
434 434 assert response.content_type == "text/x-python"
435 435
436 436 def test_download_file_wrong_cs(self, backend):
437 437 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
438 438
439 439 response = self.app.get(
440 440 route_path('repo_file_download',
441 441 repo_name=backend.repo_name,
442 442 commit_id=raw_id, f_path='vcs/nodes.svg'),
443 443 status=404)
444 444
445 445 msg = """No such commit exists for this repository"""
446 446 response.mustcontain(msg)
447 447
448 448 def test_download_file_wrong_f_path(self, backend):
449 449 commit = backend.repo.get_commit(commit_idx=173)
450 450 f_path = 'vcs/ERRORnodes.py'
451 451
452 452 response = self.app.get(
453 453 route_path('repo_file_download',
454 454 repo_name=backend.repo_name,
455 455 commit_id=commit.raw_id, f_path=f_path),
456 456 status=404)
457 457
458 458 msg = (
459 459 "There is no file nor directory at the given path: "
460 460 "`%s` at commit %s" % (f_path, commit.short_id))
461 461 response.mustcontain(msg)
462 462
463 463 def test_file_raw(self, backend):
464 464 commit = backend.repo.get_commit(commit_idx=173)
465 465 response = self.app.get(
466 466 route_path('repo_file_raw',
467 467 repo_name=backend.repo_name,
468 468 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
469 469
470 470 assert response.content_type == "text/plain"
471 471
472 472 def test_file_raw_binary(self, backend):
473 473 commit = backend.repo.get_commit()
474 474 response = self.app.get(
475 475 route_path('repo_file_raw',
476 476 repo_name=backend.repo_name,
477 477 commit_id=commit.raw_id,
478 478 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
479 479
480 480 assert response.content_disposition == 'inline'
481 481
482 482 def test_raw_file_wrong_cs(self, backend):
483 483 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
484 484
485 485 response = self.app.get(
486 486 route_path('repo_file_raw',
487 487 repo_name=backend.repo_name,
488 488 commit_id=raw_id, f_path='vcs/nodes.svg'),
489 489 status=404)
490 490
491 491 msg = """No such commit exists for this repository"""
492 492 response.mustcontain(msg)
493 493
494 494 def test_raw_wrong_f_path(self, backend):
495 495 commit = backend.repo.get_commit(commit_idx=173)
496 496 f_path = 'vcs/ERRORnodes.py'
497 497 response = self.app.get(
498 498 route_path('repo_file_raw',
499 499 repo_name=backend.repo_name,
500 500 commit_id=commit.raw_id, f_path=f_path),
501 501 status=404)
502 502
503 503 msg = (
504 504 "There is no file nor directory at the given path: "
505 505 "`%s` at commit %s" % (f_path, commit.short_id))
506 506 response.mustcontain(msg)
507 507
508 508 def test_raw_svg_should_not_be_rendered(self, backend):
509 509 backend.create_repo()
510 510 backend.ensure_file("xss.svg")
511 511 response = self.app.get(
512 512 route_path('repo_file_raw',
513 513 repo_name=backend.repo_name,
514 514 commit_id='tip', f_path='xss.svg'),)
515 515 # If the content type were image/svg+xml, the browser would be allowed to render HTML
516 516 # and malicious SVG.
517 517 assert response.content_type == "text/plain"
518 518
519 519
520 520 @pytest.mark.usefixtures("app")
521 521 class TestRepositoryArchival(object):
522 522
523 523 def test_archival(self, backend):
524 524 backend.enable_downloads()
525 525 commit = backend.repo.get_commit(commit_idx=173)
526 526 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
527 527
528 528 short = commit.short_id + extension
529 529 fname = commit.raw_id + extension
530 530 filename = '%s-%s' % (backend.repo_name, short)
531 531 response = self.app.get(
532 532 route_path('repo_archivefile',
533 533 repo_name=backend.repo_name,
534 534 fname=fname))
535 535
536 536 assert response.status == '200 OK'
537 537 headers = [
538 538 ('Content-Disposition', 'attachment; filename=%s' % filename),
539 539 ('Content-Type', '%s' % content_type),
540 540 ]
541 541
542 542 for header in headers:
543 543 assert header in response.headers.items()
544 544
545 545 def test_archival_no_hash(self, backend):
546 546 backend.enable_downloads()
547 547 commit = backend.repo.get_commit(commit_idx=173)
548 548 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
549 549
550 550 short = 'plain' + extension
551 551 fname = commit.raw_id + extension
552 552 filename = '%s-%s' % (backend.repo_name, short)
553 553 response = self.app.get(
554 554 route_path('repo_archivefile',
555 555 repo_name=backend.repo_name,
556 556 fname=fname, params={'with_hash': 0}))
557 557
558 558 assert response.status == '200 OK'
559 559 headers = [
560 560 ('Content-Disposition', 'attachment; filename=%s' % filename),
561 561 ('Content-Type', '%s' % content_type),
562 562 ]
563 563
564 564 for header in headers:
565 565 assert header in response.headers.items()
566 566
567 567 @pytest.mark.parametrize('arch_ext',[
568 568 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
569 569 def test_archival_wrong_ext(self, backend, arch_ext):
570 570 backend.enable_downloads()
571 571 commit = backend.repo.get_commit(commit_idx=173)
572 572
573 573 fname = commit.raw_id + '.' + arch_ext
574 574
575 575 response = self.app.get(
576 576 route_path('repo_archivefile',
577 577 repo_name=backend.repo_name,
578 578 fname=fname))
579 579 response.mustcontain(
580 580 'Unknown archive type for: `{}`'.format(fname))
581 581
582 582 @pytest.mark.parametrize('commit_id', [
583 583 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
584 584 def test_archival_wrong_commit_id(self, backend, commit_id):
585 585 backend.enable_downloads()
586 586 fname = '%s.zip' % commit_id
587 587
588 588 response = self.app.get(
589 589 route_path('repo_archivefile',
590 590 repo_name=backend.repo_name,
591 591 fname=fname))
592 592 response.mustcontain('Unknown commit_id')
593 593
594 594
595 595 @pytest.mark.usefixtures("app")
596 596 class TestFilesDiff(object):
597 597
598 598 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
599 599 def test_file_full_diff(self, backend, diff):
600 600 commit1 = backend.repo.get_commit(commit_idx=-1)
601 601 commit2 = backend.repo.get_commit(commit_idx=-2)
602 602
603 603 response = self.app.get(
604 604 route_path('repo_files_diff',
605 605 repo_name=backend.repo_name,
606 606 f_path='README'),
607 607 params={
608 608 'diff1': commit2.raw_id,
609 609 'diff2': commit1.raw_id,
610 610 'fulldiff': '1',
611 611 'diff': diff,
612 612 })
613 613
614 614 if diff == 'diff':
615 615 # follow the redirect, since this OLD view redirects to the compare page
616 616 response = response.follow()
617 617
618 618 # It's a symlink to README.rst
619 619 response.mustcontain('README.rst')
620 620 response.mustcontain('No newline at end of file')
621 621
622 622 def test_file_binary_diff(self, backend):
623 623 commits = [
624 624 {'message': 'First commit'},
625 625 {'message': 'Commit with binary',
626 626 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
627 627 ]
628 628 repo = backend.create_repo(commits=commits)
629 629
630 630 response = self.app.get(
631 631 route_path('repo_files_diff',
632 632 repo_name=backend.repo_name,
633 633 f_path='file.bin'),
634 634 params={
635 635 'diff1': repo.get_commit(commit_idx=0).raw_id,
636 636 'diff2': repo.get_commit(commit_idx=1).raw_id,
637 637 'fulldiff': '1',
638 638 'diff': 'diff',
639 639 })
640 640 # follow the redirect, since this OLD view redirects to the compare page
641 641 response = response.follow()
642 642 response.mustcontain('Collapse 1 commit')
643 643 file_changes = (1, 0, 0)
644 644
645 645 compare_page = ComparePage(response)
646 646 compare_page.contains_change_summary(*file_changes)
647 647
648 648 if backend.alias == 'svn':
649 649 response.mustcontain('new file 10644')
650 650 # TODO(marcink): SVN doesn't yet detect binary changes
651 651 else:
652 652 response.mustcontain('new file 100644')
653 653 response.mustcontain('binary diff hidden')
654 654
655 655 def test_diff_2way(self, backend):
656 656 commit1 = backend.repo.get_commit(commit_idx=-1)
657 657 commit2 = backend.repo.get_commit(commit_idx=-2)
658 658 response = self.app.get(
659 659 route_path('repo_files_diff_2way_redirect',
660 660 repo_name=backend.repo_name,
661 661 f_path='README'),
662 662 params={
663 663 'diff1': commit2.raw_id,
664 664 'diff2': commit1.raw_id,
665 665 })
666 666 # follow the redirect, since this OLD view redirects to the compare page
667 667 response = response.follow()
668 668
669 669 # It's a symlink to README.rst
670 670 response.mustcontain('README.rst')
671 671 response.mustcontain('No newline at end of file')
672 672
673 673 def test_requires_one_commit_id(self, backend, autologin_user):
674 674 response = self.app.get(
675 675 route_path('repo_files_diff',
676 676 repo_name=backend.repo_name,
677 677 f_path='README.rst'),
678 678 status=400)
679 679 response.mustcontain(
680 680 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
681 681
682 682 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
683 683 repo = vcsbackend.repo
684 684 response = self.app.get(
685 685 route_path('repo_files_diff',
686 686 repo_name=repo.name,
687 687 f_path='does-not-exist-in-any-commit'),
688 688 params={
689 689 'diff1': repo[0].raw_id,
690 690 'diff2': repo[1].raw_id
691 691 })
692 692
693 693 response = response.follow()
694 694 response.mustcontain('No files')
695 695
696 696 def test_returns_redirect_if_file_not_changed(self, backend):
697 697 commit = backend.repo.get_commit(commit_idx=-1)
698 698 response = self.app.get(
699 699 route_path('repo_files_diff_2way_redirect',
700 700 repo_name=backend.repo_name,
701 701 f_path='README'),
702 702 params={
703 703 'diff1': commit.raw_id,
704 704 'diff2': commit.raw_id,
705 705 })
706 706
707 707 response = response.follow()
708 708 response.mustcontain('No files')
709 709 response.mustcontain('No commits in this compare')
710 710
711 711 def test_supports_diff_to_different_path_svn(self, backend_svn):
712 712 # TODO: check this case
713 713 return
714 714
715 715 repo = backend_svn['svn-simple-layout'].scm_instance()
716 716 commit_id_1 = '24'
717 717 commit_id_2 = '26'
718 718
719 719 response = self.app.get(
720 720 route_path('repo_files_diff',
721 721 repo_name=backend_svn.repo_name,
722 722 f_path='trunk/example.py'),
723 723 params={
724 724 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
725 725 'diff2': commit_id_2,
726 726 })
727 727
728 728 response = response.follow()
729 729 response.mustcontain(
730 730 # diff contains this
731 731 "Will print out a useful message on invocation.")
732 732
733 733 # Note: we expect the view to indicate to the user what is being compared
734 734 response.mustcontain("trunk/example.py")
735 735 response.mustcontain("tags/v0.2/example.py")
736 736
737 737 def test_show_rev_redirects_to_svn_path(self, backend_svn):
738 738 # TODO: check this case
739 739 return
740 740
741 741 repo = backend_svn['svn-simple-layout'].scm_instance()
742 742 commit_id = repo[-1].raw_id
743 743
744 744 response = self.app.get(
745 745 route_path('repo_files_diff',
746 746 repo_name=backend_svn.repo_name,
747 747 f_path='trunk/example.py'),
748 748 params={
749 749 'diff1': 'branches/argparse/example.py@' + commit_id,
750 750 'diff2': commit_id,
751 751 },
752 752 status=302)
753 753 response = response.follow()
754 754 assert response.headers['Location'].endswith(
755 755 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
756 756
757 757 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
758 758 # TODO: check this case
759 759 return
760 760
761 761 repo = backend_svn['svn-simple-layout'].scm_instance()
762 762 commit_id = repo[-1].raw_id
763 763 response = self.app.get(
764 764 route_path('repo_files_diff',
765 765 repo_name=backend_svn.repo_name,
766 766 f_path='trunk/example.py'),
767 767 params={
768 768 'diff1': 'branches/argparse/example.py@' + commit_id,
769 769 'diff2': commit_id,
770 770 'show_rev': 'Show at Revision',
771 771 'annotate': 'true',
772 772 },
773 773 status=302)
774 774 response = response.follow()
775 775 assert response.headers['Location'].endswith(
776 776 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
777 777
778 778
779 779 @pytest.mark.usefixtures("app", "autologin_user")
780 780 class TestModifyFilesWithWebInterface(object):
781 781
782 782 def test_add_file_view(self, backend):
783 783 self.app.get(
784 784 route_path('repo_files_add_file',
785 785 repo_name=backend.repo_name,
786 786 commit_id='tip', f_path='/')
787 787 )
788 788
789 789 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
790 790 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
791 791 backend.create_repo()
792 792 filename = 'init.py'
793 793 response = self.app.post(
794 794 route_path('repo_files_create_file',
795 795 repo_name=backend.repo_name,
796 796 commit_id='tip', f_path='/'),
797 797 params={
798 798 'content': "",
799 799 'filename': filename,
800 800 'csrf_token': csrf_token,
801 801 },
802 802 status=302)
803 803 expected_msg = 'Successfully committed new file `{}`'.format(filename)
804 804 assert_session_flash(response, expected_msg)
805 805
806 806 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
807 807 commit_id = backend.repo.get_commit().raw_id
808 808 response = self.app.post(
809 809 route_path('repo_files_create_file',
810 810 repo_name=backend.repo_name,
811 811 commit_id=commit_id, f_path='/'),
812 812 params={
813 813 'content': "foo",
814 814 'csrf_token': csrf_token,
815 815 },
816 816 status=302)
817 817
818 818 assert_session_flash(response, 'No filename specified')
819 819
820 820 def test_add_file_into_repo_errors_and_no_commits(
821 821 self, backend, csrf_token):
822 822 repo = backend.create_repo()
823 823 # Try to create a file with no filename; it should display an error even though
824 824 # the repo has no commits yet
825 825 response = self.app.post(
826 826 route_path('repo_files_create_file',
827 827 repo_name=repo.repo_name,
828 828 commit_id='tip', f_path='/'),
829 829 params={
830 830 'content': "foo",
831 831 'csrf_token': csrf_token,
832 832 },
833 833 status=302)
834 834
835 835 assert_session_flash(response, 'No filename specified')
836 836
837 837 # Not allowed, redirect to the summary
838 838 redirected = response.follow()
839 839 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
840 840
841 841 # As there are no commits yet, the summary page is displayed together with the
842 842 # error about creating a file with no filename
843 843
844 844 assert redirected.request.path == summary_url
845 845
846 846 @pytest.mark.parametrize("filename, clean_filename", [
847 847 ('/abs/foo', 'abs/foo'),
848 848 ('../rel/foo', 'rel/foo'),
849 849 ('file/../foo/foo', 'file/foo/foo'),
850 850 ])
851 851 def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
852 852 repo = backend.create_repo()
853 853 commit_id = repo.get_commit().raw_id
854 854
855 855 response = self.app.post(
856 856 route_path('repo_files_create_file',
857 857 repo_name=repo.repo_name,
858 858 commit_id=commit_id, f_path='/'),
859 859 params={
860 860 'content': "foo",
861 861 'filename': filename,
862 862 'csrf_token': csrf_token,
863 863 },
864 864 status=302)
865 865
866 866 expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
867 867 assert_session_flash(response, expected_msg)
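# As the parametrized cases above show, leading slashes and '..' segments are
# stripped from the committed path (removed, not resolved) before the file is
# created.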
868 868
869 869 @pytest.mark.parametrize("cnt, filename, content", [
870 870 (1, 'foo.txt', "Content"),
871 871 (2, 'dir/foo.rst', "Content"),
872 872 (3, 'dir/foo-second.rst', "Content"),
873 873 (4, 'rel/dir/foo.bar', "Content"),
874 874 ])
875 875 def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
876 876 repo = backend.create_repo()
877 877 commit_id = repo.get_commit().raw_id
878 878 response = self.app.post(
879 879 route_path('repo_files_create_file',
880 880 repo_name=repo.repo_name,
881 881 commit_id=commit_id, f_path='/'),
882 882 params={
883 883 'content': content,
884 884 'filename': filename,
885 885 'csrf_token': csrf_token,
886 886 },
887 887 status=302)
888 888
889 889 expected_msg = 'Successfully committed new file `{}`'.format(filename)
890 890 assert_session_flash(response, expected_msg)
891 891
892 892 def test_edit_file_view(self, backend):
893 893 response = self.app.get(
894 894 route_path('repo_files_edit_file',
895 895 repo_name=backend.repo_name,
896 896 commit_id=backend.default_head_id,
897 897 f_path='vcs/nodes.py'),
898 898 status=200)
899 899 response.mustcontain("Module holding everything related to vcs nodes.")
900 900
901 901 def test_edit_file_view_not_on_branch(self, backend):
902 902 repo = backend.create_repo()
903 903 backend.ensure_file("vcs/nodes.py")
904 904
905 905 response = self.app.get(
906 906 route_path('repo_files_edit_file',
907 907 repo_name=repo.repo_name,
908 908 commit_id='tip',
909 909 f_path='vcs/nodes.py'),
910 910 status=302)
911 911 assert_session_flash(
912 912 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
913 913
914 914 def test_edit_file_view_commit_changes(self, backend, csrf_token):
915 915 repo = backend.create_repo()
916 916 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
917 917
918 918 response = self.app.post(
919 919 route_path('repo_files_update_file',
920 920 repo_name=repo.repo_name,
921 921 commit_id=backend.default_head_id,
922 922 f_path='vcs/nodes.py'),
923 923 params={
924 924 'content': "print 'hello world'",
925 925 'message': 'I committed',
926 926 'filename': "vcs/nodes.py",
927 927 'csrf_token': csrf_token,
928 928 },
929 929 status=302)
930 930 assert_session_flash(
931 931 response, 'Successfully committed changes to file `vcs/nodes.py`')
932 932 tip = repo.get_commit(commit_idx=-1)
933 933 assert tip.message == 'I committed'
934 934
935 935 def test_edit_file_view_commit_changes_default_message(self, backend,
936 936 csrf_token):
937 937 repo = backend.create_repo()
938 938 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
939 939
940 940 commit_id = (
941 941 backend.default_branch_name or
942 942 backend.repo.scm_instance().commit_ids[-1])
943 943
944 944 response = self.app.post(
945 945 route_path('repo_files_update_file',
946 946 repo_name=repo.repo_name,
947 947 commit_id=commit_id,
948 948 f_path='vcs/nodes.py'),
949 949 params={
950 950 'content': "print 'hello world'",
951 951 'message': '',
952 952 'filename': "vcs/nodes.py",
953 953 'csrf_token': csrf_token,
954 954 },
955 955 status=302)
956 956 assert_session_flash(
957 957 response, 'Successfully committed changes to file `vcs/nodes.py`')
958 958 tip = repo.get_commit(commit_idx=-1)
959 959 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
960 960
961 961 def test_delete_file_view(self, backend):
962 962 self.app.get(
963 963 route_path('repo_files_remove_file',
964 964 repo_name=backend.repo_name,
965 965 commit_id=backend.default_head_id,
966 966 f_path='vcs/nodes.py'),
967 967 status=200)
968 968
969 969 def test_delete_file_view_not_on_branch(self, backend):
970 970 repo = backend.create_repo()
971 971 backend.ensure_file('vcs/nodes.py')
972 972
973 973 response = self.app.get(
974 974 route_path('repo_files_remove_file',
975 975 repo_name=repo.repo_name,
976 976 commit_id='tip',
977 977 f_path='vcs/nodes.py'),
978 978 status=302)
979 979 assert_session_flash(
980 980 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
981 981
982 982 def test_delete_file_view_commit_changes(self, backend, csrf_token):
983 983 repo = backend.create_repo()
984 984 backend.ensure_file("vcs/nodes.py")
985 985
986 986 response = self.app.post(
987 987 route_path('repo_files_delete_file',
988 988 repo_name=repo.repo_name,
989 989 commit_id=backend.default_head_id,
990 990 f_path='vcs/nodes.py'),
991 991 params={
992 992 'message': 'i committed',
993 993 'csrf_token': csrf_token,
994 994 },
995 995 status=302)
996 996 assert_session_flash(
997 997 response, 'Successfully deleted file `vcs/nodes.py`')
998 998
999 999
1000 1000 @pytest.mark.usefixtures("app")
1001 1001 class TestFilesViewOtherCases(object):
1002 1002
1003 1003 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
1004 1004 self, backend_stub, autologin_regular_user, user_regular,
1005 1005 user_util):
1006 1006
1007 1007 repo = backend_stub.create_repo()
1008 1008 user_util.grant_user_permission_to_repo(
1009 1009 repo, user_regular, 'repository.write')
1010 1010 response = self.app.get(
1011 1011 route_path('repo_files',
1012 1012 repo_name=repo.repo_name,
1013 1013 commit_id='tip', f_path='/'))
1014 1014
1015 1015 repo_file_add_url = route_path(
1016 1016 'repo_files_add_file',
1017 1017 repo_name=repo.repo_name,
1018 1018 commit_id=0, f_path='')
1019 1019
1020 1020 assert_session_flash(
1021 1021 response,
1022 1022 'There are no files yet. <a class="alert-link" '
1023 1023 'href="{}">Click here to add a new file.</a>'
1024 1024 .format(repo_file_add_url))
1025 1025
1026 1026 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1027 1027 self, backend_stub, autologin_regular_user):
1028 1028 repo = backend_stub.create_repo()
1029 1029 # init session for anon user
1030 1030 route_path('repo_summary', repo_name=repo.repo_name)
1031 1031
1032 1032 repo_file_add_url = route_path(
1033 1033 'repo_files_add_file',
1034 1034 repo_name=repo.repo_name,
1035 1035 commit_id=0, f_path='')
1036 1036
1037 1037 response = self.app.get(
1038 1038 route_path('repo_files',
1039 1039 repo_name=repo.repo_name,
1040 1040 commit_id='tip', f_path='/'))
1041 1041
1042 1042 assert_session_flash(response, no_=repo_file_add_url)
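# the `no_` argument asserts that the flash message does NOT contain the
# add-file link, since this user lacks write permission on the repository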
1043 1043
1044 1044 @pytest.mark.parametrize('file_node', [
1045 1045 'archive/file.zip',
1046 1046 'diff/my-file.txt',
1047 1047 'render.py',
1048 1048 'render',
1049 1049 'remove_file',
1050 1050 'remove_file/to-delete.txt',
1051 1051 ])
1052 1052 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1053 1053 backend.create_repo()
1054 1054 backend.ensure_file(file_node)
1055 1055
1056 1056 self.app.get(
1057 1057 route_path('repo_files',
1058 1058 repo_name=backend.repo_name,
1059 1059 commit_id='tip', f_path=file_node),
1060 1060 status=200)
1061 1061
1062 1062
1063 1063 class TestAdjustFilePathForSvn(object):
1064 1064 """
1065 1065 SVN specific adjustments of node history in RepoFilesView.
1066 1066 """
1067 1067
1068 1068 def test_returns_path_relative_to_matched_reference(self):
1069 1069 repo = self._repo(branches=['trunk'])
1070 1070 self.assert_file_adjustment('trunk/file', 'file', repo)
1071 1071
1072 1072 def test_does_not_modify_file_if_no_reference_matches(self):
1073 1073 repo = self._repo(branches=['trunk'])
1074 1074 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1075 1075
1076 1076 def test_does_not_adjust_partial_directory_names(self):
1077 1077 repo = self._repo(branches=['trun'])
1078 1078 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1079 1079
1080 1080 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1081 1081 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1082 1082 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1083 1083
1084 1084 def assert_file_adjustment(self, f_path, expected, repo):
1085 1085 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1086 1086 assert result == expected
1087 1087
1088 1088 def _repo(self, branches=None):
1089 1089 repo = mock.Mock()
1090 1090 repo.branches = OrderedDict((name, '0') for name in branches or [])
1091 1091 repo.tags = {}
1092 1092 return repo
@@ -1,149 +1,149 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.utils2 import md5
24 24 from rhodecode.model.db import Repository
25 25 from rhodecode.model.meta import Session
26 26 from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel
27 27
28 28
29 29 def route_path(name, params=None, **kwargs):
30 30 import urllib.request, urllib.parse, urllib.error
31 31
32 32 base_url = {
33 33 'repo_summary': '/{repo_name}',
34 34 'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers',
35 35 'edit_repo_issuetracker_test': '/{repo_name}/settings/issue_trackers/test',
36 36 'edit_repo_issuetracker_delete': '/{repo_name}/settings/issue_trackers/delete',
37 37 'edit_repo_issuetracker_update': '/{repo_name}/settings/issue_trackers/update',
38 38 }[name].format(**kwargs)
39 39
40 40 if params:
41 41 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
42 42 return base_url
43 43
44 44
45 45 @pytest.mark.usefixtures("app")
46 46 class TestRepoIssueTracker(object):
47 47 def test_issuetracker_index(self, autologin_user, backend):
48 48 repo = backend.create_repo()
49 49 response = self.app.get(route_path('edit_repo_issuetracker',
50 50 repo_name=repo.repo_name))
51 51 assert response.status_code == 200
52 52
53 53 def test_add_and_test_issuetracker_patterns(
54 54 self, autologin_user, backend, csrf_token, request, xhr_header):
55 55 pattern = 'issuetracker_pat'
56 56 another_pattern = pattern+'1'
57 57 post_url = route_path(
58 58 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name)
59 59 post_data = {
60 60 'new_pattern_pattern_0': pattern,
61 61 'new_pattern_url_0': 'http://url',
62 62 'new_pattern_prefix_0': 'prefix',
63 63 'new_pattern_description_0': 'description',
64 64 'new_pattern_pattern_1': another_pattern,
65 65 'new_pattern_url_1': '/url1',
66 66 'new_pattern_prefix_1': 'prefix1',
67 67 'new_pattern_description_1': 'description1',
68 68 'csrf_token': csrf_token
69 69 }
70 70 self.app.post(post_url, post_data, status=302)
71 71 self.settings_model = IssueTrackerSettingsModel(repo=backend.repo)
72 72 settings = self.settings_model.get_repo_settings()
73 73 self.uid = md5(pattern)
74 74 assert settings[self.uid]['pat'] == pattern
75 75 self.another_uid = md5(another_pattern)
76 76 assert settings[self.another_uid]['pat'] == another_pattern
77 77
78 78 # test pattern
79 79 data = {'test_text': 'example of issuetracker_pat replacement',
80 80 'csrf_token': csrf_token}
81 81 response = self.app.post(
82 82 route_path('edit_repo_issuetracker_test',
83 83 repo_name=backend.repo.repo_name),
84 84 extra_environ=xhr_header, params=data)
85 85
86 assert response.body == \
86 assert response.text == \
87 87 'example of <a class="tooltip issue-tracker-link" href="http://url" title="description">prefix</a> replacement'
88 88
89 89 @request.addfinalizer
90 90 def cleanup():
91 91 self.settings_model.delete_entries(self.uid)
92 92 self.settings_model.delete_entries(self.another_uid)
93 93
94 94 def test_edit_issuetracker_pattern(
95 95 self, autologin_user, backend, csrf_token, request):
96 96 entry_key = 'issuetracker_pat_'
97 97 pattern = 'issuetracker_pat2'
98 98 old_pattern = 'issuetracker_pat'
99 99 old_uid = md5(old_pattern)
100 100
101 101 sett = SettingsModel(repo=backend.repo).create_or_update_setting(
102 102 entry_key+old_uid, old_pattern, 'unicode')
103 103 Session().add(sett)
104 104 Session().commit()
105 105 post_url = route_path(
106 106 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name)
107 107 post_data = {
108 108 'new_pattern_pattern_0': pattern,
109 109 'new_pattern_url_0': '/url',
110 110 'new_pattern_prefix_0': 'prefix',
111 111 'new_pattern_description_0': 'description',
112 112 'uid': old_uid,
113 113 'csrf_token': csrf_token
114 114 }
115 115 self.app.post(post_url, post_data, status=302)
116 116 self.settings_model = IssueTrackerSettingsModel(repo=backend.repo)
117 117 settings = self.settings_model.get_repo_settings()
118 118 self.uid = md5(pattern)
119 119 assert settings[self.uid]['pat'] == pattern
120 120 with pytest.raises(KeyError):
121 121 key = settings[old_uid]
122 122
123 123 @request.addfinalizer
124 124 def cleanup():
125 125 self.settings_model.delete_entries(self.uid)
126 126
127 127 def test_delete_issuetracker_pattern(
128 128 self, autologin_user, backend, csrf_token, settings_util, xhr_header):
129 129 repo = backend.create_repo()
130 130 repo_name = repo.repo_name
131 131 entry_key = 'issuetracker_pat_'
132 132 pattern = 'issuetracker_pat3'
133 133 uid = md5(pattern)
134 134 settings_util.create_repo_rhodecode_setting(
135 135 repo=backend.repo, name=entry_key+uid,
136 136 value=entry_key, type_='unicode', cleanup=False)
137 137
138 138 self.app.post(
139 139 route_path(
140 140 'edit_repo_issuetracker_delete',
141 141 repo_name=backend.repo.repo_name),
142 142 {
143 143 'uid': uid,
144 144 'csrf_token': csrf_token,
145 145 '': ''
146 146 }, extra_environ=xhr_header, status=200)
147 147 settings = IssueTrackerSettingsModel(
148 148 repo=Repository.get_by_repo_name(repo_name)).get_repo_settings()
149 149 assert 'rhodecode_%s%s' % (entry_key, uid) not in settings
@@ -1,1680 +1,1680 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib import helpers as h
25 25 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
26 26 from rhodecode.lib.vcs.nodes import FileNode
27 27 from rhodecode.lib.ext_json import json
28 28 from rhodecode.model.changeset_status import ChangesetStatusModel
29 29 from rhodecode.model.db import (
30 30 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
31 31 from rhodecode.model.meta import Session
32 32 from rhodecode.model.pull_request import PullRequestModel
33 33 from rhodecode.model.user import UserModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.tests import (
36 36 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
37 37
38 38
39 39 def route_path(name, params=None, **kwargs):
40 40 import urllib.request, urllib.parse, urllib.error
41 41
42 42 base_url = {
43 43 'repo_changelog': '/{repo_name}/changelog',
44 44 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
45 45 'repo_commits': '/{repo_name}/commits',
46 46 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
47 47 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
48 48 'pullrequest_show_all': '/{repo_name}/pull-request',
49 49 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
50 50 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
51 51 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
52 52 'pullrequest_new': '/{repo_name}/pull-request/new',
53 53 'pullrequest_create': '/{repo_name}/pull-request/create',
54 54 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
55 55 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
56 56 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
57 57 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
58 58 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
59 59 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
60 60 }[name].format(**kwargs)
61 61
62 62 if params:
63 63 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
64 64 return base_url
65 65
66 66
67 67 @pytest.mark.usefixtures('app', 'autologin_user')
68 68 @pytest.mark.backends("git", "hg")
69 69 class TestPullrequestsView(object):
70 70
71 71 def test_index(self, backend):
72 72 self.app.get(route_path(
73 73 'pullrequest_new',
74 74 repo_name=backend.repo_name))
75 75
76 76 def test_option_menu_create_pull_request_exists(self, backend):
77 77 repo_name = backend.repo_name
78 78 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
79 79
80 80 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
81 81 'pullrequest_new', repo_name=repo_name)
82 82 response.mustcontain(create_pr_link)
83 83
84 84 def test_create_pr_form_with_raw_commit_id(self, backend):
85 85 repo = backend.repo
86 86
87 87 self.app.get(
88 88 route_path('pullrequest_new', repo_name=repo.repo_name,
89 89 commit=repo.get_commit().raw_id),
90 90 status=200)
91 91
92 92 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
93 93 @pytest.mark.parametrize('range_diff', ["0", "1"])
94 94 def test_show(self, pr_util, pr_merge_enabled, range_diff):
95 95 pull_request = pr_util.create_pull_request(
96 96 mergeable=pr_merge_enabled, enable_notifications=False)
97 97
98 98 response = self.app.get(route_path(
99 99 'pullrequest_show',
100 100 repo_name=pull_request.target_repo.scm_instance().name,
101 101 pull_request_id=pull_request.pull_request_id,
102 102 params={'range-diff': range_diff}))
103 103
104 104 for commit_id in pull_request.revisions:
105 105 response.mustcontain(commit_id)
106 106
107 107 response.mustcontain(pull_request.target_ref_parts.type)
108 108 response.mustcontain(pull_request.target_ref_parts.name)
109 109
110 110 response.mustcontain('class="pull-request-merge"')
111 111
112 112 if pr_merge_enabled:
113 113 response.mustcontain('Pull request reviewer approval is pending')
114 114 else:
115 115 response.mustcontain('Server-side pull request merging is disabled.')
116 116
117 117 if range_diff == "1":
118 118 response.mustcontain('Turn off: Show the diff as commit range')
119 119
120 120 def test_show_versions_of_pr(self, backend, csrf_token):
121 121 commits = [
122 122 {'message': 'initial-commit',
123 123 'added': [FileNode('test-file.txt', 'LINE1\n')]},
124 124
125 125 {'message': 'commit-1',
126 126 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
127 127 # Above is the initial version of the PR, which changes a single line
128 128
129 129 # from now on we'll add 3 more commits, each one adding another line
130 130 {'message': 'commit-2',
131 131 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
132 132
133 133 {'message': 'commit-3',
134 134 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
135 135
136 136 {'message': 'commit-4',
137 137 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
138 138 ]
139 139
140 140 commit_ids = backend.create_master_repo(commits)
141 141 target = backend.create_repo(heads=['initial-commit'])
142 142 source = backend.create_repo(heads=['commit-1'])
143 143 source_repo_name = source.repo_name
144 144 target_repo_name = target.repo_name
145 145
146 146 target_ref = 'branch:{branch}:{commit_id}'.format(
147 147 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
148 148 source_ref = 'branch:{branch}:{commit_id}'.format(
149 149 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
150 150
151 151 response = self.app.post(
152 152 route_path('pullrequest_create', repo_name=source.repo_name),
153 153 [
154 154 ('source_repo', source_repo_name),
155 155 ('source_ref', source_ref),
156 156 ('target_repo', target_repo_name),
157 157 ('target_ref', target_ref),
158 158 ('common_ancestor', commit_ids['initial-commit']),
159 159 ('pullrequest_title', 'Title'),
160 160 ('pullrequest_desc', 'Description'),
161 161 ('description_renderer', 'markdown'),
162 162 ('__start__', 'review_members:sequence'),
163 163 ('__start__', 'reviewer:mapping'),
164 164 ('user_id', '1'),
165 165 ('__start__', 'reasons:sequence'),
166 166 ('reason', 'Some reason'),
167 167 ('__end__', 'reasons:sequence'),
168 168 ('__start__', 'rules:sequence'),
169 169 ('__end__', 'rules:sequence'),
170 170 ('mandatory', 'False'),
171 171 ('__end__', 'reviewer:mapping'),
172 172 ('__end__', 'review_members:sequence'),
173 173 ('__start__', 'revisions:sequence'),
174 174 ('revisions', commit_ids['commit-1']),
175 175 ('__end__', 'revisions:sequence'),
176 176 ('user', ''),
177 177 ('csrf_token', csrf_token),
178 178 ],
179 179 status=302)
180 180
181 181 location = response.headers['Location']
182 182
183 183 pull_request_id = location.rsplit('/', 1)[1]
184 184 assert pull_request_id != 'new'
185 185 pull_request = PullRequest.get(int(pull_request_id))
186 186
187 187 pull_request_id = pull_request.pull_request_id
188 188
189 189 # Show initial version of PR
190 190 response = self.app.get(
191 191 route_path('pullrequest_show',
192 192 repo_name=target_repo_name,
193 193 pull_request_id=pull_request_id))
194 194
195 195 response.mustcontain('commit-1')
196 196 response.mustcontain(no=['commit-2'])
197 197 response.mustcontain(no=['commit-3'])
198 198 response.mustcontain(no=['commit-4'])
199 199
200 200 response.mustcontain('cb-addition"></span><span>LINE2</span>')
201 201 response.mustcontain(no=['LINE3'])
202 202 response.mustcontain(no=['LINE4'])
203 203 response.mustcontain(no=['LINE5'])
204 204
205 205 # update PR #1
206 206 source_repo = Repository.get_by_repo_name(source_repo_name)
207 207 backend.pull_heads(source_repo, heads=['commit-2'])
208 208 response = self.app.post(
209 209 route_path('pullrequest_update',
210 210 repo_name=target_repo_name, pull_request_id=pull_request_id),
211 211 params={'update_commits': 'true', 'csrf_token': csrf_token})
212 212
213 213 # update PR #2
214 214 source_repo = Repository.get_by_repo_name(source_repo_name)
215 215 backend.pull_heads(source_repo, heads=['commit-3'])
216 216 response = self.app.post(
217 217 route_path('pullrequest_update',
218 218 repo_name=target_repo_name, pull_request_id=pull_request_id),
219 219 params={'update_commits': 'true', 'csrf_token': csrf_token})
220 220
221 221 # update PR #3
222 222 source_repo = Repository.get_by_repo_name(source_repo_name)
223 223 backend.pull_heads(source_repo, heads=['commit-4'])
224 224 response = self.app.post(
225 225 route_path('pullrequest_update',
226 226 repo_name=target_repo_name, pull_request_id=pull_request_id),
227 227 params={'update_commits': 'true', 'csrf_token': csrf_token})
228 228
229 229 # Show final version !
230 230 response = self.app.get(
231 231 route_path('pullrequest_show',
232 232 repo_name=target_repo_name,
233 233 pull_request_id=pull_request_id))
234 234
235 235 # 3 updates were made, so the latest version is v4
236 236 response.mustcontain('4 versions available for this pull request')
237 237 response.mustcontain(no=['rhodecode diff rendering error'])
238 238
239 239 # the final version must show all 4 commits and 4 added lines
240 240 response.mustcontain('commit-1')
241 241 response.mustcontain('commit-2')
242 242 response.mustcontain('commit-3')
243 243 response.mustcontain('commit-4')
244 244
245 245 response.mustcontain('cb-addition"></span><span>LINE2</span>')
246 246 response.mustcontain('cb-addition"></span><span>LINE3</span>')
247 247 response.mustcontain('cb-addition"></span><span>LINE4</span>')
248 248 response.mustcontain('cb-addition"></span><span>LINE5</span>')
249 249
250 250 # fetch versions
251 251 pr = PullRequest.get(pull_request_id)
252 252 versions = [x.pull_request_version_id for x in pr.versions.all()]
253 253 assert len(versions) == 3
254 254
255 255 # show v1,v2,v3,v4
256 256 def cb_line(text):
257 257 return 'cb-addition"></span><span>{}</span>'.format(text)
258 258
259 259 def cb_context(text):
260 260 return '<span class="cb-code"><span class="cb-action cb-context">' \
261 261 '</span><span>{}</span></span>'.format(text)
262 262
263 263 commit_tests = {
264 264 # (commits expected in the response, commits expected to be absent)
265 265 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
266 266 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
267 267 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
268 268 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
269 269 }
270 270 diff_tests = {
271 271 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
272 272 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
273 273 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
274 274 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
275 275 }
276 276 for idx, ver in enumerate(versions, 1):
277 277
278 278 response = self.app.get(
279 279 route_path('pullrequest_show',
280 280 repo_name=target_repo_name,
281 281 pull_request_id=pull_request_id,
282 282 params={'version': ver}))
283 283
284 284 response.mustcontain(no=['rhodecode diff rendering error'])
285 285 response.mustcontain('Showing changes at v{}'.format(idx))
286 286
287 287 yes, no = commit_tests[idx]
288 288 for y in yes:
289 289 response.mustcontain(y)
290 290 for n in no:
291 291 response.mustcontain(no=n)
292 292
293 293 yes, no = diff_tests[idx]
294 294 for y in yes:
295 295 response.mustcontain(cb_line(y))
296 296 for n in no:
297 297 response.mustcontain(no=n)
298 298
299 299 # show diff between versions
300 300 diff_compare_tests = {
301 301 1: (['LINE3'], ['LINE1', 'LINE2']),
302 302 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
303 303 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
304 304 }
305 305 for idx, ver in enumerate(versions, 1):
306 306 adds, context = diff_compare_tests[idx]
307 307
308 308 to_ver = ver+1
309 309 if idx == 3:
310 310 to_ver = 'latest'
311 311
312 312 response = self.app.get(
313 313 route_path('pullrequest_show',
314 314 repo_name=target_repo_name,
315 315 pull_request_id=pull_request_id,
316 316 params={'from_version': versions[0], 'version': to_ver}))
317 317
318 318 response.mustcontain(no=['rhodecode diff rendering error'])
319 319
320 320 for a in adds:
321 321 response.mustcontain(cb_line(a))
322 322 for c in context:
323 323 response.mustcontain(cb_context(c))
324 324
325 325 # test version v2 -> v3
326 326 response = self.app.get(
327 327 route_path('pullrequest_show',
328 328 repo_name=target_repo_name,
329 329 pull_request_id=pull_request_id,
330 330 params={'from_version': versions[1], 'version': versions[2]}))
331 331
332 332 response.mustcontain(cb_context('LINE1'))
333 333 response.mustcontain(cb_context('LINE2'))
334 334 response.mustcontain(cb_context('LINE3'))
335 335 response.mustcontain(cb_line('LINE4'))
336 336
337 337 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
338 338 # Logout
339 339 response = self.app.post(
340 340 h.route_path('logout'),
341 341 params={'csrf_token': csrf_token})
342 342 # Login as regular user
343 343 response = self.app.post(h.route_path('login'),
344 344 {'username': TEST_USER_REGULAR_LOGIN,
345 345 'password': 'test12'})
346 346
347 347 pull_request = pr_util.create_pull_request(
348 348 author=TEST_USER_REGULAR_LOGIN)
349 349
350 350 response = self.app.get(route_path(
351 351 'pullrequest_show',
352 352 repo_name=pull_request.target_repo.scm_instance().name,
353 353 pull_request_id=pull_request.pull_request_id))
354 354
355 355 response.mustcontain('Server-side pull request merging is disabled.')
356 356
357 357 assert_response = response.assert_response()
358 358 # for a regular user without merge permissions, we don't see it
359 359 assert_response.no_element_exists('#close-pull-request-action')
360 360
361 361 user_util.grant_user_permission_to_repo(
362 362 pull_request.target_repo,
363 363 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
364 364 'repository.write')
365 365 response = self.app.get(route_path(
366 366 'pullrequest_show',
367 367 repo_name=pull_request.target_repo.scm_instance().name,
368 368 pull_request_id=pull_request.pull_request_id))
369 369
370 370 response.mustcontain('Server-side pull request merging is disabled.')
371 371
372 372 assert_response = response.assert_response()
373 373 # now the regular user has merge permissions, so we see the CLOSE button
374 374 assert_response.one_element_exists('#close-pull-request-action')
375 375
376 376 def test_show_invalid_commit_id(self, pr_util):
377 377 # Simulating invalid revisions which will cause a lookup error
378 378 pull_request = pr_util.create_pull_request()
379 379 pull_request.revisions = ['invalid']
380 380 Session().add(pull_request)
381 381 Session().commit()
382 382
383 383 response = self.app.get(route_path(
384 384 'pullrequest_show',
385 385 repo_name=pull_request.target_repo.scm_instance().name,
386 386 pull_request_id=pull_request.pull_request_id))
387 387
388 388 for commit_id in pull_request.revisions:
389 389 response.mustcontain(commit_id)
390 390
391 391 def test_show_invalid_source_reference(self, pr_util):
392 392 pull_request = pr_util.create_pull_request()
393 393 pull_request.source_ref = 'branch:b:invalid'
394 394 Session().add(pull_request)
395 395 Session().commit()
396 396
397 397 self.app.get(route_path(
398 398 'pullrequest_show',
399 399 repo_name=pull_request.target_repo.scm_instance().name,
400 400 pull_request_id=pull_request.pull_request_id))
401 401
402 402 def test_edit_title_description(self, pr_util, csrf_token):
403 403 pull_request = pr_util.create_pull_request()
404 404 pull_request_id = pull_request.pull_request_id
405 405
406 406 response = self.app.post(
407 407 route_path('pullrequest_update',
408 408 repo_name=pull_request.target_repo.repo_name,
409 409 pull_request_id=pull_request_id),
410 410 params={
411 411 'edit_pull_request': 'true',
412 412 'title': 'New title',
413 413 'description': 'New description',
414 414 'csrf_token': csrf_token})
415 415
416 416 assert_session_flash(
417 417 response, u'Pull request title & description updated.',
418 418 category='success')
419 419
420 420 pull_request = PullRequest.get(pull_request_id)
421 421 assert pull_request.title == 'New title'
422 422 assert pull_request.description == 'New description'
423 423
424 424 def test_edit_title_description_special_chars(self, pr_util, csrf_token):
425 425 pull_request = pr_util.create_pull_request()
426 426 pull_request_id = pull_request.pull_request_id
427 427
428 428 response = self.app.post(
429 429 route_path('pullrequest_update',
430 430 repo_name=pull_request.target_repo.repo_name,
431 431 pull_request_id=pull_request_id),
432 432 params={
433 433 'edit_pull_request': 'true',
434 434 'title': 'New title {} {2} {foo}',
435 435 'description': 'New description',
436 436 'csrf_token': csrf_token})
437 437
438 438 assert_session_flash(
439 439 response, u'Pull request title & description updated.',
440 440 category='success')
441 441
442 442 pull_request = PullRequest.get(pull_request_id)
443 443 assert pull_request.title_safe == 'New title {{}} {{2}} {{foo}}'
444 444
445 445 def test_edit_title_description_closed(self, pr_util, csrf_token):
446 446 pull_request = pr_util.create_pull_request()
447 447 pull_request_id = pull_request.pull_request_id
448 448 repo_name = pull_request.target_repo.repo_name
449 449 pr_util.close()
450 450
451 451 response = self.app.post(
452 452 route_path('pullrequest_update',
453 453 repo_name=repo_name, pull_request_id=pull_request_id),
454 454 params={
455 455 'edit_pull_request': 'true',
456 456 'title': 'New title',
457 457 'description': 'New description',
458 458 'csrf_token': csrf_token}, status=200)
459 459 assert_session_flash(
460 460 response, u'Cannot update closed pull requests.',
461 461 category='error')
462 462
463 463 def test_update_invalid_source_reference(self, pr_util, csrf_token):
464 464 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
465 465
466 466 pull_request = pr_util.create_pull_request()
467 467 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
468 468 Session().add(pull_request)
469 469 Session().commit()
470 470
471 471 pull_request_id = pull_request.pull_request_id
472 472
473 473 response = self.app.post(
474 474 route_path('pullrequest_update',
475 475 repo_name=pull_request.target_repo.repo_name,
476 476 pull_request_id=pull_request_id),
477 477 params={'update_commits': 'true', 'csrf_token': csrf_token})
478 478
479 479 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
480 480 UpdateFailureReason.MISSING_SOURCE_REF])
481 481 assert_session_flash(response, expected_msg, category='error')
482 482
483 483 def test_missing_target_reference(self, pr_util, csrf_token):
484 484 from rhodecode.lib.vcs.backends.base import MergeFailureReason
485 485 pull_request = pr_util.create_pull_request(
486 486 approved=True, mergeable=True)
487 487 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
488 488 pull_request.target_ref = unicode_reference
489 489 Session().add(pull_request)
490 490 Session().commit()
491 491
492 492 pull_request_id = pull_request.pull_request_id
493 493 pull_request_url = route_path(
494 494 'pullrequest_show',
495 495 repo_name=pull_request.target_repo.repo_name,
496 496 pull_request_id=pull_request_id)
497 497
498 498 response = self.app.get(pull_request_url)
499 499 target_ref_id = 'invalid-branch'
500 500 merge_resp = MergeResponse(
501 501 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
502 502 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
503 503 response.assert_response().element_contains(
504 504 'div[data-role="merge-message"]', merge_resp.merge_status_message)
505 505
506 506 def test_comment_and_close_pull_request_custom_message_approved(
507 507 self, pr_util, csrf_token, xhr_header):
508 508
509 509 pull_request = pr_util.create_pull_request(approved=True)
510 510 pull_request_id = pull_request.pull_request_id
511 511 author = pull_request.user_id
512 512 repo = pull_request.target_repo.repo_id
513 513
514 514 self.app.post(
515 515 route_path('pullrequest_comment_create',
516 516 repo_name=pull_request.target_repo.scm_instance().name,
517 517 pull_request_id=pull_request_id),
518 518 params={
519 519 'close_pull_request': '1',
520 520 'text': 'Closing a PR',
521 521 'csrf_token': csrf_token},
522 522 extra_environ=xhr_header,)
523 523
524 524 journal = UserLog.query()\
525 525 .filter(UserLog.user_id == author)\
526 526 .filter(UserLog.repository_id == repo) \
527 527 .order_by(UserLog.user_log_id.asc()) \
528 528 .all()
529 529 assert journal[-1].action == 'repo.pull_request.close'
530 530
531 531 pull_request = PullRequest.get(pull_request_id)
532 532 assert pull_request.is_closed()
533 533
534 534 status = ChangesetStatusModel().get_status(
535 535 pull_request.source_repo, pull_request=pull_request)
536 536 assert status == ChangesetStatus.STATUS_APPROVED
537 537 comments = ChangesetComment().query() \
538 538 .filter(ChangesetComment.pull_request == pull_request) \
539 539 .order_by(ChangesetComment.comment_id.asc())\
540 540 .all()
541 541 assert comments[-1].text == 'Closing a PR'
542 542
543 543 def test_comment_force_close_pull_request_rejected(
544 544 self, pr_util, csrf_token, xhr_header):
545 545 pull_request = pr_util.create_pull_request()
546 546 pull_request_id = pull_request.pull_request_id
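# reviewer entries are (user_id, reasons, mandatory, role, rules) tuples
# (field order inferred from the values passed below)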
547 547 PullRequestModel().update_reviewers(
548 548 pull_request_id, [
549 549 (1, ['reason'], False, 'reviewer', []),
550 550 (2, ['reason2'], False, 'reviewer', [])],
551 551 pull_request.author)
552 552 author = pull_request.user_id
553 553 repo = pull_request.target_repo.repo_id
554 554
555 555 self.app.post(
556 556 route_path('pullrequest_comment_create',
557 557 repo_name=pull_request.target_repo.scm_instance().name,
558 558 pull_request_id=pull_request_id),
559 559 params={
560 560 'close_pull_request': '1',
561 561 'csrf_token': csrf_token},
562 562 extra_environ=xhr_header)
563 563
564 564 pull_request = PullRequest.get(pull_request_id)
565 565
566 566 journal = UserLog.query()\
567 567 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
568 568 .order_by(UserLog.user_log_id.asc()) \
569 569 .all()
570 570 assert journal[-1].action == 'repo.pull_request.close'
571 571
572 572 # check only the latest status, not the review status
573 573 status = ChangesetStatusModel().get_status(
574 574 pull_request.source_repo, pull_request=pull_request)
575 575 assert status == ChangesetStatus.STATUS_REJECTED
576 576
577 577 def test_comment_and_close_pull_request(
578 578 self, pr_util, csrf_token, xhr_header):
579 579 pull_request = pr_util.create_pull_request()
580 580 pull_request_id = pull_request.pull_request_id
581 581
582 582 response = self.app.post(
583 583 route_path('pullrequest_comment_create',
584 584 repo_name=pull_request.target_repo.scm_instance().name,
585 585 pull_request_id=pull_request.pull_request_id),
586 586 params={
587 587 'close_pull_request': 'true',
588 588 'csrf_token': csrf_token},
589 589 extra_environ=xhr_header)
590 590
591 591 assert response.json
592 592
593 593 pull_request = PullRequest.get(pull_request_id)
594 594 assert pull_request.is_closed()
595 595
596 596 # check only the latest status, not the review status
597 597 status = ChangesetStatusModel().get_status(
598 598 pull_request.source_repo, pull_request=pull_request)
599 599 assert status == ChangesetStatus.STATUS_REJECTED
600 600
601 601 def test_comment_and_close_pull_request_try_edit_comment(
602 602 self, pr_util, csrf_token, xhr_header
603 603 ):
604 604 pull_request = pr_util.create_pull_request()
605 605 pull_request_id = pull_request.pull_request_id
606 606 target_scm = pull_request.target_repo.scm_instance()
607 607 target_scm_name = target_scm.name
608 608
609 609 response = self.app.post(
610 610 route_path(
611 611 'pullrequest_comment_create',
612 612 repo_name=target_scm_name,
613 613 pull_request_id=pull_request_id,
614 614 ),
615 615 params={
616 616 'close_pull_request': 'true',
617 617 'csrf_token': csrf_token,
618 618 },
619 619 extra_environ=xhr_header)
620 620
621 621 assert response.json
622 622
623 623 pull_request = PullRequest.get(pull_request_id)
624 624 target_scm = pull_request.target_repo.scm_instance()
625 625 target_scm_name = target_scm.name
626 626 assert pull_request.is_closed()
627 627
628 628 # check only the latest status, not the review status
629 629 status = ChangesetStatusModel().get_status(
630 630 pull_request.source_repo, pull_request=pull_request)
631 631 assert status == ChangesetStatus.STATUS_REJECTED
632 632
633 633 for comment_id in response.json.keys():
634 634 test_text = 'test'
635 635 response = self.app.post(
636 636 route_path(
637 637 'pullrequest_comment_edit',
638 638 repo_name=target_scm_name,
639 639 pull_request_id=pull_request_id,
640 640 comment_id=comment_id,
641 641 ),
642 642 extra_environ=xhr_header,
643 643 params={
644 644 'csrf_token': csrf_token,
645 645 'text': test_text,
646 646 },
647 647 status=403,
648 648 )
649 649 assert response.status_int == 403
650 650
651 651 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
652 652 pull_request = pr_util.create_pull_request()
653 653 target_scm = pull_request.target_repo.scm_instance()
654 654 target_scm_name = target_scm.name
655 655
656 656 response = self.app.post(
657 657 route_path(
658 658 'pullrequest_comment_create',
659 659 repo_name=target_scm_name,
660 660 pull_request_id=pull_request.pull_request_id),
661 661 params={
662 662 'csrf_token': csrf_token,
663 663 'text': 'init',
664 664 },
665 665 extra_environ=xhr_header,
666 666 )
667 667 assert response.json
668 668
669 669 for comment_id in response.json.keys():
670 670 assert comment_id
671 671 test_text = 'test'
672 672 self.app.post(
673 673 route_path(
674 674 'pullrequest_comment_edit',
675 675 repo_name=target_scm_name,
676 676 pull_request_id=pull_request.pull_request_id,
677 677 comment_id=comment_id,
678 678 ),
679 679 extra_environ=xhr_header,
680 680 params={
681 681 'csrf_token': csrf_token,
682 682 'text': test_text,
683 683 'version': '0',
684 684 },
685 685
686 686 )
687 687 text_from_db = ChangesetComment.query().filter(
688 688 ChangesetComment.comment_id == comment_id).first().text
689 689 assert test_text == text_from_db
690 690
691 691 def test_comment_and_comment_edit_no_change(self, pr_util, csrf_token, xhr_header):
692 692 pull_request = pr_util.create_pull_request()
693 693 target_scm = pull_request.target_repo.scm_instance()
694 694 target_scm_name = target_scm.name
695 695
696 696 response = self.app.post(
697 697 route_path(
698 698 'pullrequest_comment_create',
699 699 repo_name=target_scm_name,
700 700 pull_request_id=pull_request.pull_request_id),
701 701 params={
702 702 'csrf_token': csrf_token,
703 703 'text': 'init',
704 704 },
705 705 extra_environ=xhr_header,
706 706 )
707 707 assert response.json
708 708
709 709 for comment_id in response.json.keys():
710 710 test_text = 'init'
711 711 response = self.app.post(
712 712 route_path(
713 713 'pullrequest_comment_edit',
714 714 repo_name=target_scm_name,
715 715 pull_request_id=pull_request.pull_request_id,
716 716 comment_id=comment_id,
717 717 ),
718 718 extra_environ=xhr_header,
719 719 params={
720 720 'csrf_token': csrf_token,
721 721 'text': test_text,
722 722 'version': '0',
723 723 },
724 724 status=404,
725 725
726 726 )
727 727 assert response.status_int == 404
728 728
729 729 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
730 730 pull_request = pr_util.create_pull_request()
731 731 target_scm = pull_request.target_repo.scm_instance()
732 732 target_scm_name = target_scm.name
733 733
734 734 response = self.app.post(
735 735 route_path(
736 736 'pullrequest_comment_create',
737 737 repo_name=target_scm_name,
738 738 pull_request_id=pull_request.pull_request_id),
739 739 params={
740 740 'csrf_token': csrf_token,
741 741 'text': 'init',
742 742 },
743 743 extra_environ=xhr_header,
744 744 )
745 745 assert response.json
746 746 for comment_id in response.json.keys():
747 747 test_text = 'test'
748 748 self.app.post(
749 749 route_path(
750 750 'pullrequest_comment_edit',
751 751 repo_name=target_scm_name,
752 752 pull_request_id=pull_request.pull_request_id,
753 753 comment_id=comment_id,
754 754 ),
755 755 extra_environ=xhr_header,
756 756 params={
757 757 'csrf_token': csrf_token,
758 758 'text': test_text,
759 759 'version': '0',
760 760 },
761 761
762 762 )
763 763 test_text_v2 = 'test_v2'
764 764 response = self.app.post(
765 765 route_path(
766 766 'pullrequest_comment_edit',
767 767 repo_name=target_scm_name,
768 768 pull_request_id=pull_request.pull_request_id,
769 769 comment_id=comment_id,
770 770 ),
771 771 extra_environ=xhr_header,
772 772 params={
773 773 'csrf_token': csrf_token,
774 774 'text': test_text_v2,
775 775 'version': '0',
776 776 },
777 777 status=409,
778 778 )
779 779 assert response.status_int == 409
780 780
781 781 text_from_db = ChangesetComment.query().filter(
782 782 ChangesetComment.comment_id == comment_id).first().text
783 783 
784 784 assert test_text == text_from_db
785 785 assert test_text_v2 != text_from_db
786 786
787 787 def test_comment_and_comment_edit_permissions_forbidden(
788 788 self, autologin_regular_user, user_regular, user_admin, pr_util,
789 789 csrf_token, xhr_header):
790 790 pull_request = pr_util.create_pull_request(
791 791 author=user_admin.username, enable_notifications=False)
792 792 comment = CommentsModel().create(
793 793 text='test',
794 794 repo=pull_request.target_repo.scm_instance().name,
795 795 user=user_admin,
796 796 pull_request=pull_request,
797 797 )
798 798 response = self.app.post(
799 799 route_path(
800 800 'pullrequest_comment_edit',
801 801 repo_name=pull_request.target_repo.scm_instance().name,
802 802 pull_request_id=pull_request.pull_request_id,
803 803 comment_id=comment.comment_id,
804 804 ),
805 805 extra_environ=xhr_header,
806 806 params={
807 807 'csrf_token': csrf_token,
808 808 'text': 'test_text',
809 809 },
810 810 status=403,
811 811 )
812 812 assert response.status_int == 403
813 813
814 814 def test_create_pull_request(self, backend, csrf_token):
815 815 commits = [
816 816 {'message': 'ancestor'},
817 817 {'message': 'change'},
818 818 {'message': 'change2'},
819 819 ]
820 820 commit_ids = backend.create_master_repo(commits)
821 821 target = backend.create_repo(heads=['ancestor'])
822 822 source = backend.create_repo(heads=['change2'])
823 823
824 824 response = self.app.post(
825 825 route_path('pullrequest_create', repo_name=source.repo_name),
826 826 [
827 827 ('source_repo', source.repo_name),
828 828 ('source_ref', 'branch:default:' + commit_ids['change2']),
829 829 ('target_repo', target.repo_name),
830 830 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
831 831 ('common_ancestor', commit_ids['ancestor']),
832 832 ('pullrequest_title', 'Title'),
833 833 ('pullrequest_desc', 'Description'),
834 834 ('description_renderer', 'markdown'),
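# the __start__/__end__ markers below spell out the nested form structure
# (review_members -> reviewer -> reasons/rules, then revisions) expected by
# the PR create form; this mirrors peppercorn-style nested field encoding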
835 835 ('__start__', 'review_members:sequence'),
836 836 ('__start__', 'reviewer:mapping'),
837 837 ('user_id', '1'),
838 838 ('__start__', 'reasons:sequence'),
839 839 ('reason', 'Some reason'),
840 840 ('__end__', 'reasons:sequence'),
841 841 ('__start__', 'rules:sequence'),
842 842 ('__end__', 'rules:sequence'),
843 843 ('mandatory', 'False'),
844 844 ('__end__', 'reviewer:mapping'),
845 845 ('__end__', 'review_members:sequence'),
846 846 ('__start__', 'revisions:sequence'),
847 847 ('revisions', commit_ids['change']),
848 848 ('revisions', commit_ids['change2']),
849 849 ('__end__', 'revisions:sequence'),
850 850 ('user', ''),
851 851 ('csrf_token', csrf_token),
852 852 ],
853 853 status=302)
854 854
855 855 location = response.headers['Location']
856 856 pull_request_id = location.rsplit('/', 1)[1]
857 857 assert pull_request_id != 'new'
858 858 pull_request = PullRequest.get(int(pull_request_id))
859 859
860 860 # check that we now have both revisions
861 861 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
862 862 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
863 863 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
864 864 assert pull_request.target_ref == expected_target_ref
865 865
866 866 def test_reviewer_notifications(self, backend, csrf_token):
867 867 # We have to use app.post for this test so that the notifications
868 868 # are created properly along with the new PR
869 869 commits = [
870 870 {'message': 'ancestor',
871 871 'added': [FileNode('file_A', content='content_of_ancestor')]},
872 872 {'message': 'change',
873 873 'added': [FileNode('file_a', content='content_of_change')]},
874 874 {'message': 'change-child'},
875 875 {'message': 'ancestor-child', 'parents': ['ancestor'],
876 876 'added': [
877 877 FileNode('file_B', content='content_of_ancestor_child')]},
878 878 {'message': 'ancestor-child-2'},
879 879 ]
880 880 commit_ids = backend.create_master_repo(commits)
881 881 target = backend.create_repo(heads=['ancestor-child'])
882 882 source = backend.create_repo(heads=['change'])
883 883
884 884 response = self.app.post(
885 885 route_path('pullrequest_create', repo_name=source.repo_name),
886 886 [
887 887 ('source_repo', source.repo_name),
888 888 ('source_ref', 'branch:default:' + commit_ids['change']),
889 889 ('target_repo', target.repo_name),
890 890 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
891 891 ('common_ancestor', commit_ids['ancestor']),
892 892 ('pullrequest_title', 'Title'),
893 893 ('pullrequest_desc', 'Description'),
894 894 ('description_renderer', 'markdown'),
895 895 ('__start__', 'review_members:sequence'),
896 896 ('__start__', 'reviewer:mapping'),
897 897 ('user_id', '2'),
898 898 ('__start__', 'reasons:sequence'),
899 899 ('reason', 'Some reason'),
900 900 ('__end__', 'reasons:sequence'),
901 901 ('__start__', 'rules:sequence'),
902 902 ('__end__', 'rules:sequence'),
903 903 ('mandatory', 'False'),
904 904 ('__end__', 'reviewer:mapping'),
905 905 ('__end__', 'review_members:sequence'),
906 906 ('__start__', 'revisions:sequence'),
907 907 ('revisions', commit_ids['change']),
908 908 ('__end__', 'revisions:sequence'),
909 909 ('user', ''),
910 910 ('csrf_token', csrf_token),
911 911 ],
912 912 status=302)
913 913
914 914 location = response.headers['Location']
915 915
916 916 pull_request_id = location.rsplit('/', 1)[1]
917 917 assert pull_request_id != 'new'
918 918 pull_request = PullRequest.get(int(pull_request_id))
919 919
920 920 # Check that a notification was made
921 921 notifications = Notification.query()\
922 922 .filter(Notification.created_by == pull_request.author.user_id,
923 923 Notification.type_ == Notification.TYPE_PULL_REQUEST,
924 924 Notification.subject.contains(
925 925 "requested a pull request review. !%s" % pull_request_id))
926 926 assert len(notifications.all()) == 1
927 927
928 928 # Change reviewers and check that a notification was made
929 929 PullRequestModel().update_reviewers(
930 930 pull_request.pull_request_id, [
931 931 (1, [], False, 'reviewer', [])
932 932 ],
933 933 pull_request.author)
934 934 assert len(notifications.all()) == 2
935 935
936 936 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
937 937 commits = [
938 938 {'message': 'ancestor',
939 939 'added': [FileNode('file_A', content='content_of_ancestor')]},
940 940 {'message': 'change',
941 941 'added': [FileNode('file_a', content='content_of_change')]},
942 942 {'message': 'change-child'},
943 943 {'message': 'ancestor-child', 'parents': ['ancestor'],
944 944 'added': [
945 945 FileNode('file_B', content='content_of_ancestor_child')]},
946 946 {'message': 'ancestor-child-2'},
947 947 ]
948 948 commit_ids = backend.create_master_repo(commits)
949 949 target = backend.create_repo(heads=['ancestor-child'])
950 950 source = backend.create_repo(heads=['change'])
951 951
952 952 response = self.app.post(
953 953 route_path('pullrequest_create', repo_name=source.repo_name),
954 954 [
955 955 ('source_repo', source.repo_name),
956 956 ('source_ref', 'branch:default:' + commit_ids['change']),
957 957 ('target_repo', target.repo_name),
958 958 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
959 959 ('common_ancestor', commit_ids['ancestor']),
960 960 ('pullrequest_title', 'Title'),
961 961 ('pullrequest_desc', 'Description'),
962 962 ('description_renderer', 'markdown'),
963 963 ('__start__', 'review_members:sequence'),
964 964 ('__start__', 'reviewer:mapping'),
965 965 ('user_id', '1'),
966 966 ('__start__', 'reasons:sequence'),
967 967 ('reason', 'Some reason'),
968 968 ('__end__', 'reasons:sequence'),
969 969 ('__start__', 'rules:sequence'),
970 970 ('__end__', 'rules:sequence'),
971 971 ('mandatory', 'False'),
972 972 ('__end__', 'reviewer:mapping'),
973 973 ('__end__', 'review_members:sequence'),
974 974 ('__start__', 'revisions:sequence'),
975 975 ('revisions', commit_ids['change']),
976 976 ('__end__', 'revisions:sequence'),
977 977 ('user', ''),
978 978 ('csrf_token', csrf_token),
979 979 ],
980 980 status=302)
981 981
982 982 location = response.headers['Location']
983 983
984 984 pull_request_id = location.rsplit('/', 1)[1]
985 985 assert pull_request_id != 'new'
986 986 pull_request = PullRequest.get(int(pull_request_id))
987 987
988 988 # target_ref has to point to the ancestor's commit_id in order to
989 989 # show the correct diff
990 990 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
991 991 assert pull_request.target_ref == expected_target_ref
992 992
993 993 # Check generated diff contents
994 994 response = response.follow()
995 995 response.mustcontain(no=['content_of_ancestor'])
996 996 response.mustcontain(no=['content_of_ancestor_child'])
997 997 response.mustcontain('content_of_change')
998 998
999 999 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
1000 1000 # Clear any previous calls to rcextensions
1001 1001 rhodecode.EXTENSIONS.calls.clear()
1002 1002
1003 1003 pull_request = pr_util.create_pull_request(
1004 1004 approved=True, mergeable=True)
1005 1005 pull_request_id = pull_request.pull_request_id
1006 1006 repo_name = pull_request.target_repo.scm_instance().name
1007 1007 
1008 1008 url = route_path('pullrequest_merge',
1009 1009 repo_name=repo_name,
1010 1010 pull_request_id=pull_request_id)
1011 1011 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
1012 1012
1013 1013 pull_request = PullRequest.get(pull_request_id)
1014 1014
1015 1015 assert response.status_int == 200
1016 1016 assert pull_request.is_closed()
1017 1017 assert_pull_request_status(
1018 1018 pull_request, ChangesetStatus.STATUS_APPROVED)
1019 1019
1020 1020 # Check the relevant log entries were added
1021 1021 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1022 1022 actions = [log.action for log in user_logs]
1023 1023 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
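# commit ids carried by the PR; reused below to verify the push-hook payload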
1024 1024 expected_actions = [
1025 1025 u'repo.pull_request.close',
1026 1026 u'repo.pull_request.merge',
1027 1027 u'repo.pull_request.comment.create'
1028 1028 ]
1029 1029 assert actions == expected_actions
1030 1030
1031 1031 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1032 1032 log_entries = list(user_logs)
1033 1033 assert log_entries[-1].action == 'user.push'
1034 1034 assert log_entries[-1].action_data['commit_ids'] == pr_commit_ids
1035 1035
1036 1036 # Check post_push rcextension was really executed
1037 1037 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1038 1038 assert len(push_calls) == 1
1039 1039 unused_last_call_args, last_call_kwargs = push_calls[0]
1040 1040 assert last_call_kwargs['action'] == 'push'
1041 1041 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1042 1042
1043 1043 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1044 1044 pull_request = pr_util.create_pull_request(mergeable=False)
1045 1045 pull_request_id = pull_request.pull_request_id
1046 1046 pull_request = PullRequest.get(pull_request_id)
1047 1047
1048 1048 response = self.app.post(
1049 1049 route_path('pullrequest_merge',
1050 1050 repo_name=pull_request.target_repo.scm_instance().name,
1051 1051 pull_request_id=pull_request.pull_request_id),
1052 1052 params={'csrf_token': csrf_token}).follow()
1053 1053
1054 1054 assert response.status_int == 200
1055 1055 response.mustcontain(
1056 1056 'Merge is not currently possible because of below failed checks.')
1057 1057 response.mustcontain('Server-side pull request merging is disabled.')
1058 1058
1059 1059 @pytest.mark.skip_backends('svn')
1060 1060 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1061 1061 pull_request = pr_util.create_pull_request(mergeable=True)
1062 1062 pull_request_id = pull_request.pull_request_id
1063 1063 repo_name = pull_request.target_repo.scm_instance().name
1064 1064
1065 1065 response = self.app.post(
1066 1066 route_path('pullrequest_merge',
1067 1067 repo_name=repo_name, pull_request_id=pull_request_id),
1068 1068 params={'csrf_token': csrf_token}).follow()
1069 1069
1070 1070 assert response.status_int == 200
1071 1071
1072 1072 response.mustcontain(
1073 1073 'Merge is not currently possible because of below failed checks.')
1074 1074 response.mustcontain('Pull request reviewer approval is pending.')
1075 1075
1076 1076 def test_merge_pull_request_renders_failure_reason(
1077 1077 self, user_regular, csrf_token, pr_util):
1078 1078 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1079 1079 pull_request_id = pull_request.pull_request_id
1080 1080 repo_name = pull_request.target_repo.scm_instance().name
1081 1081
1082 1082 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1083 1083 MergeFailureReason.PUSH_FAILED,
1084 1084 metadata={'target': 'shadow repo',
1085 1085 'merge_commit': 'xxx'})
1086 1086 model_patcher = mock.patch.multiple(
1087 1087 PullRequestModel,
1088 1088 merge_repo=mock.Mock(return_value=merge_resp),
1089 1089 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1090 1090
1091 1091 with model_patcher:
1092 1092 response = self.app.post(
1093 1093 route_path('pullrequest_merge',
1094 1094 repo_name=repo_name,
1095 1095 pull_request_id=pull_request_id),
1096 1096 params={'csrf_token': csrf_token}, status=302)
1097 1097
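# rebuild an equivalent failure response just to render the expected flash message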
1098 1098 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1099 1099 metadata={'target': 'shadow repo',
1100 1100 'merge_commit': 'xxx'})
1101 1101 assert_session_flash(response, merge_resp.merge_status_message)
1102 1102
1103 1103 def test_update_source_revision(self, backend, csrf_token):
1104 1104 commits = [
1105 1105 {'message': 'ancestor'},
1106 1106 {'message': 'change'},
1107 1107 {'message': 'change-2'},
1108 1108 ]
1109 1109 commit_ids = backend.create_master_repo(commits)
1110 1110 target = backend.create_repo(heads=['ancestor'])
1111 1111 source = backend.create_repo(heads=['change'])
1112 1112
1113 1113 # create a PR from 'change' (source) into 'ancestor' (target)
1114 1114 pull_request = PullRequest()
1115 1115
1116 1116 pull_request.source_repo = source
1117 1117 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1118 1118 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1119 1119
1120 1120 pull_request.target_repo = target
1121 1121 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1122 1122 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1123 1123
1124 1124 pull_request.revisions = [commit_ids['change']]
1125 1125 pull_request.title = u"Test"
1126 1126 pull_request.description = u"Description"
1127 1127 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1128 1128 pull_request.pull_request_state = PullRequest.STATE_CREATED
1129 1129 Session().add(pull_request)
1130 1130 Session().commit()
1131 1131 pull_request_id = pull_request.pull_request_id
1132 1132
1133 1133 # source has ancestor - change - change-2
1134 1134 backend.pull_heads(source, heads=['change-2'])
1135 1135 target_repo_name = target.repo_name
1136 1136
1137 1137 # update PR
1138 1138 self.app.post(
1139 1139 route_path('pullrequest_update',
1140 1140 repo_name=target_repo_name, pull_request_id=pull_request_id),
1141 1141 params={'update_commits': 'true', 'csrf_token': csrf_token})
1142 1142
1143 1143 response = self.app.get(
1144 1144 route_path('pullrequest_show',
1145 1145 repo_name=target_repo_name,
1146 1146 pull_request_id=pull_request.pull_request_id))
1147 1147
1148 1148 assert response.status_int == 200
1149 1149 response.mustcontain('Pull request updated to')
1150 1150 response.mustcontain('with 1 added, 0 removed commits.')
1151 1151
1152 1152 # check that we now have both revisions
1153 1153 pull_request = PullRequest.get(pull_request_id)
1154 1154 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1155 1155
1156 1156 def test_update_target_revision(self, backend, csrf_token):
1157 1157 commits = [
1158 1158 {'message': 'ancestor'},
1159 1159 {'message': 'change'},
1160 1160 {'message': 'ancestor-new', 'parents': ['ancestor']},
1161 1161 {'message': 'change-rebased'},
1162 1162 ]
1163 1163 commit_ids = backend.create_master_repo(commits)
1164 1164 target = backend.create_repo(heads=['ancestor'])
1165 1165 source = backend.create_repo(heads=['change'])
1166 1166
1167 1167 # create a PR from 'change' (source) into 'ancestor' (target)
1168 1168 pull_request = PullRequest()
1169 1169
1170 1170 pull_request.source_repo = source
1171 1171 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1172 1172 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1173 1173
1174 1174 pull_request.target_repo = target
1175 1175 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1176 1176 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1177 1177
1178 1178 pull_request.revisions = [commit_ids['change']]
1179 1179 pull_request.title = u"Test"
1180 1180 pull_request.description = u"Description"
1181 1181 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1182 1182 pull_request.pull_request_state = PullRequest.STATE_CREATED
1183 1183
1184 1184 Session().add(pull_request)
1185 1185 Session().commit()
1186 1186 pull_request_id = pull_request.pull_request_id
1187 1187
1188 1188 # target has ancestor - ancestor-new
1189 1189 # source has ancestor - ancestor-new - change-rebased
1190 1190 backend.pull_heads(target, heads=['ancestor-new'])
1191 1191 backend.pull_heads(source, heads=['change-rebased'])
1192 1192 target_repo_name = target.repo_name
1193 1193
1194 1194 # update PR
1195 1195 url = route_path('pullrequest_update',
1196 1196 repo_name=target_repo_name,
1197 1197 pull_request_id=pull_request_id)
1198 1198 self.app.post(url,
1199 1199 params={'update_commits': 'true', 'csrf_token': csrf_token},
1200 1200 status=200)
1201 1201
1202 1202 # check that the PR now contains only the rebased commit
1203 1203 pull_request = PullRequest.get(pull_request_id)
1204 1204 assert pull_request.revisions == [commit_ids['change-rebased']]
1205 1205 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1206 1206 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1207 1207
1208 1208 response = self.app.get(
1209 1209 route_path('pullrequest_show',
1210 1210 repo_name=target_repo_name,
1211 1211 pull_request_id=pull_request.pull_request_id))
1212 1212 assert response.status_int == 200
1213 1213 response.mustcontain('Pull request updated to')
1214 1214 response.mustcontain('with 1 added, 1 removed commits.')
1215 1215
1216 1216 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1217 1217 backend = backend_git
1218 1218 commits = [
1219 1219 {'message': 'master-commit-1'},
1220 1220 {'message': 'master-commit-2-change-1'},
1221 1221 {'message': 'master-commit-3-change-2'},
1222 1222
1223 1223 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1224 1224 {'message': 'feat-commit-2'},
1225 1225 ]
1226 1226 commit_ids = backend.create_master_repo(commits)
1227 1227 target = backend.create_repo(heads=['master-commit-3-change-2'])
1228 1228 source = backend.create_repo(heads=['feat-commit-2'])
1229 1229
1230 1230 # create a PR bringing the feature commits into the target repo
1231 1231 pull_request = PullRequest()
1232 1232 pull_request.source_repo = source
1233 1233
1234 1234 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1235 1235 branch=backend.default_branch_name,
1236 1236 commit_id=commit_ids['master-commit-3-change-2'])
1237 1237
1238 1238 pull_request.target_repo = target
1239 1239 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1240 1240 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1241 1241
1242 1242 pull_request.revisions = [
1243 1243 commit_ids['feat-commit-1'],
1244 1244 commit_ids['feat-commit-2']
1245 1245 ]
1246 1246 pull_request.title = u"Test"
1247 1247 pull_request.description = u"Description"
1248 1248 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1249 1249 pull_request.pull_request_state = PullRequest.STATE_CREATED
1250 1250 Session().add(pull_request)
1251 1251 Session().commit()
1252 1252 pull_request_id = pull_request.pull_request_id
1253 1253
1254 1254 # PR is created, now we simulate a force-push into target,
1255 1255 # that drops the last 2 commits
1256 1256 vcsrepo = target.scm_instance()
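# clear the hooks config section so the direct git reset below does not
# trigger RhodeCode's hook callbacks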
1257 1257 vcsrepo.config.clear_section('hooks')
1258 1258 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1259 1259 target_repo_name = target.repo_name
1260 1260
1261 1261 # update PR
1262 1262 url = route_path('pullrequest_update',
1263 1263 repo_name=target_repo_name,
1264 1264 pull_request_id=pull_request_id)
1265 1265 self.app.post(url,
1266 1266 params={'update_commits': 'true', 'csrf_token': csrf_token},
1267 1267 status=200)
1268 1268
1269 1269 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1270 1270 assert response.status_int == 200
1271 1271 response.mustcontain('Pull request updated to')
1272 1272 response.mustcontain('with 0 added, 0 removed commits.')
1273 1273
1274 1274 def test_update_of_ancestor_reference(self, backend, csrf_token):
1275 1275 commits = [
1276 1276 {'message': 'ancestor'},
1277 1277 {'message': 'change'},
1278 1278 {'message': 'change-2'},
1279 1279 {'message': 'ancestor-new', 'parents': ['ancestor']},
1280 1280 {'message': 'change-rebased'},
1281 1281 ]
1282 1282 commit_ids = backend.create_master_repo(commits)
1283 1283 target = backend.create_repo(heads=['ancestor'])
1284 1284 source = backend.create_repo(heads=['change'])
1285 1285
1286 1286 # create a PR from 'change' (source) into 'ancestor' (target)
1287 1287 pull_request = PullRequest()
1288 1288 pull_request.source_repo = source
1289 1289
1290 1290 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1291 1291 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1292 1292 pull_request.target_repo = target
1293 1293 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1294 1294 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1295 1295 pull_request.revisions = [commit_ids['change']]
1296 1296 pull_request.title = u"Test"
1297 1297 pull_request.description = u"Description"
1298 1298 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1299 1299 pull_request.pull_request_state = PullRequest.STATE_CREATED
1300 1300 Session().add(pull_request)
1301 1301 Session().commit()
1302 1302 pull_request_id = pull_request.pull_request_id
1303 1303
1304 1304 # target has ancestor - ancestor-new
1305 1305 # source has ancestor - ancestor-new - change-rebased
1306 1306 backend.pull_heads(target, heads=['ancestor-new'])
1307 1307 backend.pull_heads(source, heads=['change-rebased'])
1308 1308 target_repo_name = target.repo_name
1309 1309
1310 1310 # update PR
1311 1311 self.app.post(
1312 1312 route_path('pullrequest_update',
1313 1313 repo_name=target_repo_name, pull_request_id=pull_request_id),
1314 1314 params={'update_commits': 'true', 'csrf_token': csrf_token},
1315 1315 status=200)
1316 1316
1317 1317 # Expect the target reference to be updated correctly
1318 1318 pull_request = PullRequest.get(pull_request_id)
1319 1319 assert pull_request.revisions == [commit_ids['change-rebased']]
1320 1320 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1321 1321 branch=backend.default_branch_name,
1322 1322 commit_id=commit_ids['ancestor-new'])
1323 1323 assert pull_request.target_ref == expected_target_ref
1324 1324
1325 1325 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1326 1326 branch_name = 'development'
1327 1327 commits = [
1328 1328 {'message': 'initial-commit'},
1329 1329 {'message': 'old-feature'},
1330 1330 {'message': 'new-feature', 'branch': branch_name},
1331 1331 ]
1332 1332 repo = backend_git.create_repo(commits)
1333 1333 repo_name = repo.repo_name
1334 1334 commit_ids = backend_git.commit_ids
1335 1335
1336 1336 pull_request = PullRequest()
1337 1337 pull_request.source_repo = repo
1338 1338 pull_request.target_repo = repo
1339 1339 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1340 1340 branch=branch_name, commit_id=commit_ids['new-feature'])
1341 1341 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1342 1342 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1343 1343 pull_request.revisions = [commit_ids['new-feature']]
1344 1344 pull_request.title = u"Test"
1345 1345 pull_request.description = u"Description"
1346 1346 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1347 1347 pull_request.pull_request_state = PullRequest.STATE_CREATED
1348 1348 Session().add(pull_request)
1349 1349 Session().commit()
1350 1350
1351 1351 pull_request_id = pull_request.pull_request_id
1352 1352
1353 1353 vcs = repo.scm_instance()
1354 1354 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1355 1355 # NOTE(marcink): run GC to ensure the commits are gone
1356 1356 vcs.run_gc()
1357 1357
1358 1358 response = self.app.get(route_path(
1359 1359 'pullrequest_show',
1360 1360 repo_name=repo_name,
1361 1361 pull_request_id=pull_request_id))
1362 1362
1363 1363 assert response.status_int == 200
1364 1364
1365 1365 response.assert_response().element_contains(
1366 1366 '#changeset_compare_view_content .alert strong',
1367 1367 'Missing commits')
1368 1368 response.assert_response().element_contains(
1369 1369 '#changeset_compare_view_content .alert',
1370 1370 'This pull request cannot be displayed, because one or more'
1371 1371 ' commits no longer exist in the source repository.')
1372 1372
1373 1373 def test_strip_commits_from_pull_request(
1374 1374 self, backend, pr_util, csrf_token):
1375 1375 commits = [
1376 1376 {'message': 'initial-commit'},
1377 1377 {'message': 'old-feature'},
1378 1378 {'message': 'new-feature', 'parents': ['initial-commit']},
1379 1379 ]
1380 1380 pull_request = pr_util.create_pull_request(
1381 1381 commits, target_head='initial-commit', source_head='new-feature',
1382 1382 revisions=['new-feature'])
1383 1383
1384 1384 vcs = pr_util.source_repository.scm_instance()
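# strip the PR head commit from the source repo; the git backend strip needs
# the branch name to reset, while hg strips by commit id alone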
1385 1385 if backend.alias == 'git':
1386 1386 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1387 1387 else:
1388 1388 vcs.strip(pr_util.commit_ids['new-feature'])
1389 1389
1390 1390 response = self.app.get(route_path(
1391 1391 'pullrequest_show',
1392 1392 repo_name=pr_util.target_repository.repo_name,
1393 1393 pull_request_id=pull_request.pull_request_id))
1394 1394
1395 1395 assert response.status_int == 200
1396 1396
1397 1397 response.assert_response().element_contains(
1398 1398 '#changeset_compare_view_content .alert strong',
1399 1399 'Missing commits')
1400 1400 response.assert_response().element_contains(
1401 1401 '#changeset_compare_view_content .alert',
1402 1402 'This pull request cannot be displayed, because one or more'
1403 1403 ' commits no longer exist in the source repository.')
1404 1404 response.assert_response().element_contains(
1405 1405 '#update_commits',
1406 1406 'Update commits')
1407 1407
1408 1408 def test_strip_commits_and_update(
1409 1409 self, backend, pr_util, csrf_token):
1410 1410 commits = [
1411 1411 {'message': 'initial-commit'},
1412 1412 {'message': 'old-feature'},
1413 1413 {'message': 'new-feature', 'parents': ['old-feature']},
1414 1414 ]
1415 1415 pull_request = pr_util.create_pull_request(
1416 1416 commits, target_head='old-feature', source_head='new-feature',
1417 1417 revisions=['new-feature'], mergeable=True)
1418 1418 pr_id = pull_request.pull_request_id
1419 1419 target_repo_name = pull_request.target_repo.repo_name
1420 1420
1421 1421 vcs = pr_util.source_repository.scm_instance()
1422 1422 if backend.alias == 'git':
1423 1423 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1424 1424 else:
1425 1425 vcs.strip(pr_util.commit_ids['new-feature'])
1426 1426
1427 1427 url = route_path('pullrequest_update',
1428 1428 repo_name=target_repo_name,
1429 1429 pull_request_id=pr_id)
1430 1430 response = self.app.post(url,
1431 1431 params={'update_commits': 'true',
1432 1432 'csrf_token': csrf_token})
1433 1433
1434 1434 assert response.status_int == 200
1435 1435 assert json.loads(response.body) == json.loads('{"response": true, "redirect_url": null}')
1436 1436
1437 1437 # Make sure the show page does not raise a 500 error after the update
1438 1438 response = self.app.get(route_path(
1439 1439 'pullrequest_show',
1440 1440 repo_name=target_repo_name,
1441 1441 pull_request_id=pr_id))
1442 1442
1443 1443 assert response.status_int == 200
1444 1444 response.assert_response().element_contains(
1445 1445 '#changeset_compare_view_content .alert strong',
1446 1446 'Missing commits')
1447 1447
1448 1448 def test_branch_is_a_link(self, pr_util):
1449 1449 pull_request = pr_util.create_pull_request()
1450 1450 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1451 1451 pull_request.target_ref = 'branch:target:abcdef1234567890'
1452 1452 Session().add(pull_request)
1453 1453 Session().commit()
1454 1454
1455 1455 response = self.app.get(route_path(
1456 1456 'pullrequest_show',
1457 1457 repo_name=pull_request.target_repo.scm_instance().name,
1458 1458 pull_request_id=pull_request.pull_request_id))
1459 1459 assert response.status_int == 200
1460 1460
1461 1461 source = response.assert_response().get_element('.pr-source-info')
1462 1462 source_parent = source.getparent()
1463 1463 assert len(source_parent) == 1
1464 1464
1465 1465 target = response.assert_response().get_element('.pr-target-info')
1466 1466 target_parent = target.getparent()
1467 1467 assert len(target_parent) == 1
1468 1468
1469 1469 expected_origin_link = route_path(
1470 1470 'repo_commits',
1471 1471 repo_name=pull_request.source_repo.scm_instance().name,
1472 1472 params=dict(branch='origin'))
1473 1473 expected_target_link = route_path(
1474 1474 'repo_commits',
1475 1475 repo_name=pull_request.target_repo.scm_instance().name,
1476 1476 params=dict(branch='target'))
1477 1477 assert source_parent.attrib['href'] == expected_origin_link
1478 1478 assert target_parent.attrib['href'] == expected_target_link
1479 1479
1480 1480 def test_bookmark_is_not_a_link(self, pr_util):
1481 1481 pull_request = pr_util.create_pull_request()
1482 1482 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1483 1483 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1484 1484 Session().add(pull_request)
1485 1485 Session().commit()
1486 1486
1487 1487 response = self.app.get(route_path(
1488 1488 'pullrequest_show',
1489 1489 repo_name=pull_request.target_repo.scm_instance().name,
1490 1490 pull_request_id=pull_request.pull_request_id))
1491 1491 assert response.status_int == 200
1492 1492
1493 1493 source = response.assert_response().get_element('.pr-source-info')
1494 1494 assert source.text.strip() == 'bookmark:origin'
1495 1495 assert source.getparent().attrib.get('href') is None
1496 1496
1497 1497 target = response.assert_response().get_element('.pr-target-info')
1498 1498 assert target.text.strip() == 'bookmark:target'
1499 1499 assert target.getparent().attrib.get('href') is None
1500 1500
1501 1501 def test_tag_is_not_a_link(self, pr_util):
1502 1502 pull_request = pr_util.create_pull_request()
1503 1503 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1504 1504 pull_request.target_ref = 'tag:target:abcdef1234567890'
1505 1505 Session().add(pull_request)
1506 1506 Session().commit()
1507 1507
1508 1508 response = self.app.get(route_path(
1509 1509 'pullrequest_show',
1510 1510 repo_name=pull_request.target_repo.scm_instance().name,
1511 1511 pull_request_id=pull_request.pull_request_id))
1512 1512 assert response.status_int == 200
1513 1513
1514 1514 source = response.assert_response().get_element('.pr-source-info')
1515 1515 assert source.text.strip() == 'tag:origin'
1516 1516 assert source.getparent().attrib.get('href') is None
1517 1517
1518 1518 target = response.assert_response().get_element('.pr-target-info')
1519 1519 assert target.text.strip() == 'tag:target'
1520 1520 assert target.getparent().attrib.get('href') is None
1521 1521
1522 1522 @pytest.mark.parametrize('mergeable', [True, False])
1523 1523 def test_shadow_repository_link(
1524 1524 self, mergeable, pr_util, http_host_only_stub):
1525 1525 """
1526 1526 Check that the pull request summary page displays a link to the shadow
1527 1527 repository if the pull request is mergeable. If it is not mergeable
1528 1528 the link should not be displayed.
1529 1529 """
1530 1530 pull_request = pr_util.create_pull_request(
1531 1531 mergeable=mergeable, enable_notifications=False)
1532 1532 target_repo = pull_request.target_repo.scm_instance()
1533 1533 pr_id = pull_request.pull_request_id
1534 1534 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1535 1535 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1536 1536
1537 1537 response = self.app.get(route_path(
1538 1538 'pullrequest_show',
1539 1539 repo_name=target_repo.name,
1540 1540 pull_request_id=pr_id))
1541 1541
1542 1542 if mergeable:
1543 1543 response.assert_response().element_value_contains(
1544 1544 'input.pr-mergeinfo', shadow_url)
1545 1545 response.assert_response().element_value_contains(
1546 1546 'input.pr-mergeinfo ', 'pr-merge')
1547 1547 else:
1548 1548 response.assert_response().no_element_exists('.pr-mergeinfo')
1549 1549
1550 1550
1551 1551 @pytest.mark.usefixtures('app')
1552 1552 @pytest.mark.backends("git", "hg")
1553 1553 class TestPullrequestsControllerDelete(object):
1554 1554 def test_pull_request_delete_button_permissions_admin(
1555 1555 self, autologin_user, user_admin, pr_util):
1556 1556 pull_request = pr_util.create_pull_request(
1557 1557 author=user_admin.username, enable_notifications=False)
1558 1558
1559 1559 response = self.app.get(route_path(
1560 1560 'pullrequest_show',
1561 1561 repo_name=pull_request.target_repo.scm_instance().name,
1562 1562 pull_request_id=pull_request.pull_request_id))
1563 1563
1564 1564 response.mustcontain('id="delete_pullrequest"')
1565 1565 response.mustcontain('Confirm to delete this pull request')
1566 1566
1567 1567 def test_pull_request_delete_button_permissions_owner(
1568 1568 self, autologin_regular_user, user_regular, pr_util):
1569 1569 pull_request = pr_util.create_pull_request(
1570 1570 author=user_regular.username, enable_notifications=False)
1571 1571
1572 1572 response = self.app.get(route_path(
1573 1573 'pullrequest_show',
1574 1574 repo_name=pull_request.target_repo.scm_instance().name,
1575 1575 pull_request_id=pull_request.pull_request_id))
1576 1576
1577 1577 response.mustcontain('id="delete_pullrequest"')
1578 1578 response.mustcontain('Confirm to delete this pull request')
1579 1579
1580 1580 def test_pull_request_delete_button_permissions_forbidden(
1581 1581 self, autologin_regular_user, user_regular, user_admin, pr_util):
1582 1582 pull_request = pr_util.create_pull_request(
1583 1583 author=user_admin.username, enable_notifications=False)
1584 1584
1585 1585 response = self.app.get(route_path(
1586 1586 'pullrequest_show',
1587 1587 repo_name=pull_request.target_repo.scm_instance().name,
1588 1588 pull_request_id=pull_request.pull_request_id))
1589 1589 response.mustcontain(no=['id="delete_pullrequest"'])
1590 1590 response.mustcontain(no=['Confirm to delete this pull request'])
1591 1591
1592 1592 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1593 1593 self, autologin_regular_user, user_regular, user_admin, pr_util,
1594 1594 user_util):
1595 1595
1596 1596 pull_request = pr_util.create_pull_request(
1597 1597 author=user_admin.username, enable_notifications=False)
1598 1598
1599 1599 user_util.grant_user_permission_to_repo(
1600 1600 pull_request.target_repo, user_regular,
1601 1601 'repository.write')
1602 1602
1603 1603 response = self.app.get(route_path(
1604 1604 'pullrequest_show',
1605 1605 repo_name=pull_request.target_repo.scm_instance().name,
1606 1606 pull_request_id=pull_request.pull_request_id))
1607 1607
1608 1608 response.mustcontain('id="open_edit_pullrequest"')
1609 1609 response.mustcontain('id="delete_pullrequest"')
1610 1610 response.mustcontain(no=['Confirm to delete this pull request'])
1611 1611
1612 1612 def test_delete_comment_returns_404_if_comment_does_not_exist(
1613 1613 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1614 1614
1615 1615 pull_request = pr_util.create_pull_request(
1616 1616 author=user_admin.username, enable_notifications=False)
1617 1617
1618 1618 self.app.post(
1619 1619 route_path(
1620 1620 'pullrequest_comment_delete',
1621 1621 repo_name=pull_request.target_repo.scm_instance().name,
1622 1622 pull_request_id=pull_request.pull_request_id,
1623 1623 comment_id=1024404),
1624 1624 extra_environ=xhr_header,
1625 1625 params={'csrf_token': csrf_token},
1626 1626 status=404
1627 1627 )
1628 1628
1629 1629 def test_delete_comment(
1630 1630 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1631 1631
1632 1632 pull_request = pr_util.create_pull_request(
1633 1633 author=user_admin.username, enable_notifications=False)
1634 1634 comment = pr_util.create_comment()
1635 1635 comment_id = comment.comment_id
1636 1636
1637 1637 response = self.app.post(
1638 1638 route_path(
1639 1639 'pullrequest_comment_delete',
1640 1640 repo_name=pull_request.target_repo.scm_instance().name,
1641 1641 pull_request_id=pull_request.pull_request_id,
1642 1642 comment_id=comment_id),
1643 1643 extra_environ=xhr_header,
1644 1644 params={'csrf_token': csrf_token},
1645 1645 status=200
1646 1646 )
1647 assert response.body == 'true'
1647 assert response.text == 'true'
1648 1648
1649 1649 @pytest.mark.parametrize('url_type', [
1650 1650 'pullrequest_new',
1651 1651 'pullrequest_create',
1652 1652 'pullrequest_update',
1653 1653 'pullrequest_merge',
1654 1654 ])
1655 1655 def test_pull_request_is_forbidden_on_archived_repo(
1656 1656 self, autologin_user, backend, xhr_header, user_util, url_type):
1657 1657
1658 1658 # create a temporary repo
1659 1659 source = user_util.create_repo(repo_type=backend.alias)
1660 1660 repo_name = source.repo_name
1661 1661 repo = Repository.get_by_repo_name(repo_name)
1662 1662 repo.archived = True
1663 1663 Session().commit()
1664 1664
1665 1665 response = self.app.get(
1666 1666 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1667 1667
1668 1668 msg = 'Action not supported for archived repository.'
1669 1669 assert_session_flash(response, msg)
1670 1670
1671 1671
1672 1672 def assert_pull_request_status(pull_request, expected_status):
1673 1673 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1674 1674 assert status == expected_status
1675 1675
1676 1676
1677 1677 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1678 1678 @pytest.mark.usefixtures("autologin_user")
1679 1679 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1680 1680 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,255 +1,251 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import time
23 23 import logging
24 24 import datetime
25 import hashlib
26 25 import tempfile
27 26 from os.path import join as jn
28
29 from tempfile import _RandomNameSequence
27 import urllib.parse
30 28
31 29 import pytest
32 30
33 31 from rhodecode.model.db import User
34 32 from rhodecode.lib import auth
35 33 from rhodecode.lib import helpers as h
36 34 from rhodecode.lib.helpers import flash
37 from rhodecode.lib.utils2 import safe_str
38
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.hash_utils import sha1_safe
39 37
40 38 log = logging.getLogger(__name__)
41 39
42 40 __all__ = [
43 41 'get_new_dir', 'TestController', 'route_path_generator',
44 42 'clear_cache_regions',
45 43 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 44 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 45 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 46 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 47 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 48 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 49 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 50 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 51 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 52 ]
55 53
56 54
57 55 # SOME GLOBALS FOR TESTS
58 56 TEST_DIR = tempfile.gettempdir()
59 57
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_{}'.format(next(_RandomNameSequence())))
58 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_{}'.format(next(tempfile._RandomNameSequence())))
61 59 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 60 TEST_USER_ADMIN_PASS = 'test12'
63 61 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64 62
65 63 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 64 TEST_USER_REGULAR_PASS = 'test12'
67 65 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68 66
69 67 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 68 TEST_USER_REGULAR2_PASS = 'test12'
71 69 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72 70
73 71 HG_REPO = 'vcs_test_hg'
74 72 GIT_REPO = 'vcs_test_git'
75 73 SVN_REPO = 'vcs_test_svn'
76 74
77 75 NEW_HG_REPO = 'vcs_test_hg_new'
78 76 NEW_GIT_REPO = 'vcs_test_git_new'
79 77
80 78 HG_FORK = 'vcs_test_hg_fork'
81 79 GIT_FORK = 'vcs_test_git_fork'
82 80
83 81 ## VCS
84 82 SCM_TESTS = ['hg', 'git']
85 83 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86 84
87 85 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
86 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcsgitclone{uniq_suffix}')
87 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, f'vcsgitpull{uniq_suffix}')
90 88
91 89 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
90 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcshgclone{uniq_suffix}')
91 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, f'vcshgpull{uniq_suffix}')
94 92
95 93 TEST_REPO_PREFIX = 'vcs-test'
96 94
97 95
98 96 def clear_cache_regions(regions=None):
99 97 # dogpile
100 98 from rhodecode.lib.rc_cache import region_meta
101 99 for region_name, region in region_meta.dogpile_cache_regions.items():
102 100 if not regions or region_name in regions:
103 101 region.invalidate()
104 102
105 103
106 104 def get_new_dir(title):
107 105 """
108 106 Always returns a new directory path.
109 107 """
110 108 from rhodecode.tests.vcs.utils import get_normalized_path
111 109 name_parts = [TEST_REPO_PREFIX]
112 110 if title:
113 111 name_parts.append(title)
114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
112 hex_str = sha1_safe(f'{os.getpid()} {time.time()}')
115 113 name_parts.append(hex_str)
116 114 name = '-'.join(name_parts)
117 115 path = os.path.join(TEST_DIR, name)
118 116 return get_normalized_path(path)
119 117
120 118
121 119 def repo_id_generator(name):
122 120 numeric_hash = 0
123 121 for char in name:
124 122 numeric_hash += (ord(char))
125 123 return numeric_hash
126 124
127 125
128 126 @pytest.mark.usefixtures('app', 'index_location')
129 127 class TestController(object):
130 128
131 129 maxDiff = None
132 130
133 131 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
134 132 password=TEST_USER_ADMIN_PASS):
135 133 self._logged_username = username
136 134 self._session = login_user_session(self.app, username, password)
137 135 self.csrf_token = auth.get_csrf_token(self._session)
138 136
139 137 return self._session['rhodecode_user']
140 138
141 139 def logout_user(self):
142 140 logout_user_session(self.app, auth.get_csrf_token(self._session))
143 141 self.csrf_token = None
144 142 self._logged_username = None
145 143 self._session = None
146 144
147 145 def _get_logged_user(self):
148 146 return User.get_by_username(self._logged_username)
149 147
150 148
151 149 def login_user_session(
152 150 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
153 151
154 152 response = app.post(
155 153 h.route_path('login'),
156 154 {'username': username, 'password': password})
157 if 'invalid user name' in response.body:
158 pytest.fail('could not login using %s %s' % (username, password))
155 if 'invalid user name' in response.text:
156 pytest.fail(f'could not login using {username} {password}')
159 157
160 158 assert response.status == '302 Found'
161 159 response = response.follow()
162 160 assert response.status == '200 OK'
163 161
164 162 session = response.get_session_from_response()
165 163 assert 'rhodecode_user' in session
166 164 rc_user = session['rhodecode_user']
167 165 assert rc_user.get('username') == username
168 166 assert rc_user.get('is_authenticated')
169 167
170 168 return session
171 169
172 170
173 171 def logout_user_session(app, csrf_token):
174 172 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
175 173
176 174
177 175 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
178 176 password=TEST_USER_ADMIN_PASS):
179 177 return login_user_session(app, username, password)['rhodecode_user']
180 178
181 179
182 180 def assert_session_flash(response, msg=None, category=None, no_=None):
183 181 """
184 182 Assert on a flash message in the current session.
185 183
186 184 :param response: Response from the given call; it will contain flash
187 185 messages or a bound session with them.
188 186 :param msg: The expected message. Will be evaluated if a
189 187 :class:`LazyString` is passed in.
190 188 :param category: Optional. If passed, the message category will be
191 189 checked as well.
192 190 :param no_: Optional. If passed, the message will be checked to NOT
193 191 be in the flash session
194 192 """
195 193 if msg is None and no_ is None:
196 194 raise ValueError("Parameter msg or no_ is required.")
197 195
198 196 if msg and no_:
199 197 raise ValueError("Please specify either msg or no_, but not both")
200 198
201 199 session = response.get_session_from_response()
202 200 messages = flash.pop_messages(session=session)
203 201 msg = _eval_if_lazy(msg)
204 202
205 203 if no_:
206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
204 error_msg = f'unable to detect no_ message `{no_}` in empty flash list'
207 205 else:
208 error_msg = 'unable to find message `%s` in empty flash list' % msg
206 error_msg = f'unable to find message `{msg}` in empty flash list'
209 207 assert messages, error_msg
210 208 message = messages[0]
211 209
212 210 message_text = _eval_if_lazy(message.message) or ''
213 211
214 212 if no_:
215 213 if no_ in message_text:
216 msg = u'msg `%s` found in session flash.' % (no_,)
214 msg = f'msg `{no_}` found in session flash.'
217 215 pytest.fail(safe_str(msg))
218 216 else:
219 217 if msg not in message_text:
220 fail_msg = u'msg `%s` not found in session ' \
221 u'flash: got `%s` (type:%s) instead' % (
222 msg, message_text, type(message_text))
218 fail_msg = f'msg `{msg}` not found in ' \
219 f'session flash: got `{message_text}` (type:{type(message_text)}) instead'
223 220
224 221 pytest.fail(safe_str(fail_msg))
225 222 if category:
226 223 assert category == message.category
227 224
228 225
229 226 def _eval_if_lazy(value):
230 227 return value.eval() if hasattr(value, 'eval') else value
231 228
232 229
233 230 def no_newline_id_generator(test_name):
234 231 """
235 232 Generates a test name without spaces or newline characters. Used for
236 233 nicer output of test progress.
237 234 """
238 org_name = test_name
235
239 236 test_name = safe_str(test_name)\
240 237 .replace('\n', '_N') \
241 238 .replace('\r', '_N') \
242 239 .replace('\t', '_T') \
243 240 .replace(' ', '_S')
244 241
245 242 return test_name or 'test-with-empty-name'
246 243
247 244
248 245 def route_path_generator(url_defs, name, params=None, **kwargs):
249 import urllib.request, urllib.parse, urllib.error
250 246
251 247 base_url = url_defs[name].format(**kwargs)
252 248
253 249 if params:
254 250 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
255 251 return base_url
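
A minimal sketch of how the two helpers above are typically combined in a test. The route definition, the posted form and the expected flash message are invented for illustration only; the helper signatures match route_path_generator() and assert_session_flash() as defined in this module.

    # Illustrative only: route name, form body and flash text are hypothetical.
    url_defs = {'user_edit': '/_admin/users/{user_id}/edit'}

    def test_flash_after_update_example(app, csrf_token):
        url = route_path_generator(
            url_defs, 'user_edit', params={'tab': 'perms'}, user_id=1)
        # url == '/_admin/users/1/edit?tab=perms'
        response = app.post(url, {'csrf_token': csrf_token})
        assert_session_flash(response, msg='User updated successfully')
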
@@ -1,222 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.pyramid_utils import get_app_config
24 24 from rhodecode.tests.fixture import TestINI
25 25 from rhodecode.tests.server_utils import RcVCSServer
26 26
27 27
28 28 @pytest.fixture(scope='session')
29 29 def vcsserver(request, vcsserver_port, vcsserver_factory):
30 30 """
31 31 Session scope VCSServer.
32 32
33 33 Tests which need the VCSServer have to rely on this fixture in order
34 34 to ensure it will be running.
35 35
36 36 For specific needs, the fixture vcsserver_factory can be used. It allows
37 37 adjusting the configuration file for the test run.
38 38
39 39 Command line args:
40 40
41 41 --without-vcsserver: Allows switching this fixture off. You have to
42 42 start the server manually.
43 43
44 44 --vcsserver-port: Will expect the VCSServer to listen on this port.
45 45 """
46 46
47 47 if not request.config.getoption('with_vcsserver'):
48 48 return None
49 49
50 50 return vcsserver_factory(
51 51 request, vcsserver_port=vcsserver_port)
52 52
53 53
54 54 @pytest.fixture(scope='session')
55 55 def vcsserver_factory(tmpdir_factory):
56 56 """
57 57 Use this if you need a running vcsserver with a special configuration.
58 58 """
59 59
60 60 def factory(request, overrides=(), vcsserver_port=None,
61 61 log_file=None):
62 62
63 63 if vcsserver_port is None:
64 64 vcsserver_port = get_available_port()
65 65
66 66 overrides = list(overrides)
67 67 overrides.append({'server:main': {'port': vcsserver_port}})
68 68
69 69 option_name = 'vcsserver_config_http'
70 70 override_option_name = 'vcsserver_config_override'
71 71 config_file = get_config(
72 72 request.config, option_name=option_name,
73 73 override_option_name=override_option_name, overrides=overrides,
74 74 basetemp=tmpdir_factory.getbasetemp().strpath,
75 75 prefix='test_vcs_')
76 76
77 77 server = RcVCSServer(config_file, log_file)
78 78 server.start()
79 79
80 80 @request.addfinalizer
81 81 def cleanup():
82 82 server.shutdown()
83 83
84 84 server.wait_until_ready()
85 85 return server
86 86
87 87 return factory
88 88
89 89
90 90 def _use_log_level(config):
91 91 level = config.getoption('test_loglevel') or 'critical'
92 92 return level.upper()
93 93
94 94
95 95 @pytest.fixture(scope='session')
96 96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
97 97 option_name = 'pyramid_config'
98 98 log_level = _use_log_level(request.config)
99 99
100 100 overrides = [
101 101 {'server:main': {'port': rcserver_port}},
102 102 {'app:main': {
103 103 'vcs.server': 'localhost:%s' % vcsserver_port,
104 104 # johbo: We will always start the VCSServer on our own based on the
105 105 # fixtures of the test cases. For the test run it must always be
106 106 # off in the INI file.
107 107 'vcs.start_server': 'false',
108 108
109 109 'vcs.server.protocol': 'http',
110 110 'vcs.scm_app_implementation': 'http',
111 111 'vcs.hooks.protocol': 'http',
112 112 'vcs.hooks.host': '127.0.0.1',
113 113 }},
114 114
115 115 {'handler_console': {
116 116 'class': 'StreamHandler',
117 117 'args': '(sys.stderr,)',
118 118 'level': log_level,
119 119 }},
120 120
121 121 ]
122 122
123 123 filename = get_config(
124 124 request.config, option_name=option_name,
125 125 override_option_name='{}_override'.format(option_name),
126 126 overrides=overrides,
127 127 basetemp=tmpdir_factory.getbasetemp().strpath,
128 128 prefix='test_rce_')
129 129 return filename
130 130
131 131
132 132 @pytest.fixture(scope='session')
133 133 def ini_settings(ini_config):
134 134 ini_path = ini_config
135 135 return get_app_config(ini_path)
136 136
137 137
138 138 def get_available_port(min_port=40000, max_port=55555):
139 139 from rhodecode.lib.utils2 import get_available_port as _get_port
140 140 return _get_port(min_port, max_port)
141 141
142 142
143 143 @pytest.fixture(scope='session')
144 144 def rcserver_port(request):
145 145 port = get_available_port()
146 print('Using rhodecode port {}'.format(port))
146 print(f'Using rhodecode port {port}')
147 147 return port
148 148
149 149
150 150 @pytest.fixture(scope='session')
151 151 def vcsserver_port(request):
152 152 port = request.config.getoption('--vcsserver-port')
153 153 if port is None:
154 154 port = get_available_port()
155 print('Using vcsserver port {}'.format(port))
155 print(f'Using vcsserver port {port}')
156 156 return port
157 157
158 158
159 159 @pytest.fixture(scope='session')
160 160 def available_port_factory():
161 161 """
162 162 Returns a callable which returns free port numbers.
163 163 """
164 164 return get_available_port
165 165
166 166
167 167 @pytest.fixture()
168 168 def available_port(available_port_factory):
169 169 """
170 170 Gives you one free port for the current test.
171 171
172 172 Uses "available_port_factory" to retrieve the port.
173 173 """
174 174 return available_port_factory()
175 175
176 176
177 177 @pytest.fixture(scope='session')
178 178 def testini_factory(tmpdir_factory, ini_config):
179 179 """
180 180 Factory to create an INI file based on TestINI.
181 181
182 182 It will make sure to place the INI file in the correct directory.
183 183 """
184 184 basetemp = tmpdir_factory.getbasetemp().strpath
185 185 return TestIniFactory(basetemp, ini_config)
186 186
187 187
188 188 class TestIniFactory(object):
189 189
190 190 def __init__(self, basetemp, template_ini):
191 191 self._basetemp = basetemp
192 192 self._template_ini = template_ini
193 193
194 194 def __call__(self, ini_params, new_file_prefix='test'):
195 195 ini_file = TestINI(
196 196 self._template_ini, ini_params=ini_params,
197 197 new_file_prefix=new_file_prefix, dir=self._basetemp)
198 198 result = ini_file.create()
199 199 return result
200 200
201 201
202 202 def get_config(
203 203 config, option_name, override_option_name, overrides=None,
204 204 basetemp=None, prefix='test'):
205 205 """
206 206 Find a configuration file and apply overrides for the given `prefix`.
207 207 """
208 208 config_file = (
209 209 config.getoption(option_name) or config.getini(option_name))
210 210 if not config_file:
211 211 pytest.exit(
212 212 "Configuration error, could not extract {}.".format(option_name))
213 213
214 214 overrides = overrides or []
215 215 config_override = config.getoption(override_option_name)
216 216 if config_override:
217 217 overrides.append(config_override)
218 218 temp_ini_file = TestINI(
219 219 config_file, ini_params=overrides, new_file_prefix=prefix,
220 220 dir=basetemp)
221 221
222 222 return temp_ini_file.create()
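
A brief sketch of how a test module could request its own VCSServer through the factory above; the fixture name is hypothetical, and only fixtures already defined in this file are used. The factory registers its own finalizer, so shutdown is handled automatically.

    import pytest

    @pytest.fixture()
    def dedicated_vcsserver(request, vcsserver_factory, available_port_factory):
        # Start a VCSServer on a fresh port for this test only; cleanup is
        # performed by the finalizer that vcsserver_factory installs.
        return vcsserver_factory(
            request, vcsserver_port=available_port_factory())
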
@@ -1,1724 +1,1725 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 import hashlib
24 23 import os
25 24 import re
26 25 import pprint
27 26 import shutil
28 27 import socket
29 28 import subprocess
30 29 import time
31 30 import uuid
32 31 import dateutil.tz
33 32 import logging
33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 import rhodecode.lib
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.hash_utils import sha1_safe
60 61 from rhodecode.lib.vcs.backends import get_backend
61 62 from rhodecode.lib.vcs.nodes import FileNode
62 63 from rhodecode.tests import (
63 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 66 TEST_USER_REGULAR_PASS)
66 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 68 from rhodecode.tests.fixture import Fixture
68 69 from rhodecode.config import utils as config_utils
69 70
70 71 log = logging.getLogger(__name__)
71 72
72 73
73 74 def cmp(a, b):
74 75 # backport cmp from python2 so we can still use it in the custom code in this module
75 76 return (a > b) - (a < b)
76 77
78
77 79 @pytest.fixture(scope='session', autouse=True)
78 80 def activate_example_rcextensions(request):
79 81 """
80 82 Patch in an example rcextensions module which verifies passed in kwargs.
81 83 """
82 84 from rhodecode.config import rcextensions
83 85
84 86 old_extensions = rhodecode.EXTENSIONS
85 87 rhodecode.EXTENSIONS = rcextensions
86 88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
87 89
88 90 @request.addfinalizer
89 91 def cleanup():
90 92 rhodecode.EXTENSIONS = old_extensions
91 93
92 94
93 95 @pytest.fixture()
94 96 def capture_rcextensions():
95 97 """
96 98 Returns the recorded calls to entry points in rcextensions.
97 99 """
98 100 calls = rhodecode.EXTENSIONS.calls
99 101 calls.clear()
100 102 # Note: At this moment, it is still the empty dict, but that will
101 103 # be filled during the test run and since it is a reference this
102 104 # is enough to make it work.
103 105 return calls
104 106
105 107
106 108 @pytest.fixture(scope='session')
107 109 def http_environ_session():
108 110 """
109 111 Allows using "http_environ" in session scope.
110 112 """
111 113 return plain_http_environ()
112 114
113 115
114 116 def plain_http_host_stub():
115 117 """
116 118 Value of HTTP_HOST in the test run.
117 119 """
118 120 return 'example.com:80'
119 121
120 122
121 123 @pytest.fixture()
122 124 def http_host_stub():
123 125 """
124 126 Value of HTTP_HOST in the test run.
125 127 """
126 128 return plain_http_host_stub()
127 129
128 130
129 131 def plain_http_host_only_stub():
130 132 """
131 133 Value of HTTP_HOST in the test run.
132 134 """
133 135 return plain_http_host_stub().split(':')[0]
134 136
135 137
136 138 @pytest.fixture()
137 139 def http_host_only_stub():
138 140 """
139 141 Value of HTTP_HOST in the test run.
140 142 """
141 143 return plain_http_host_only_stub()
142 144
143 145
144 146 def plain_http_environ():
145 147 """
146 148 HTTP extra environ keys.
147 149
148 150 Used by the test application as well as for setting up the pylons
149 151 environment. In the case of the fixture "app" it should be possible
150 152 to override this for a specific test case.
151 153 """
152 154 return {
153 155 'SERVER_NAME': plain_http_host_only_stub(),
154 156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
155 157 'HTTP_HOST': plain_http_host_stub(),
156 158 'HTTP_USER_AGENT': 'rc-test-agent',
157 159 'REQUEST_METHOD': 'GET'
158 160 }
159 161
160 162
161 163 @pytest.fixture()
162 164 def http_environ():
163 165 """
164 166 HTTP extra environ keys.
165 167
166 168 Used by the test application as well as for setting up the pylons
167 169 environment. In the case of the fixture "app" it should be possible
168 170 to override this for a specific test case.
169 171 """
170 172 return plain_http_environ()
171 173
172 174
173 175 @pytest.fixture(scope='session')
174 176 def baseapp(ini_config, vcsserver, http_environ_session):
175 177 from rhodecode.lib.pyramid_utils import get_app_config
176 178 from rhodecode.config.middleware import make_pyramid_app
177 179
178 180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
179 181 pyramid.paster.setup_logging(ini_config)
180 182
181 183 settings = get_app_config(ini_config)
182 184 app = make_pyramid_app({'__file__': ini_config}, **settings)
183 185
184 186 return app
185 187
186 188
187 189 @pytest.fixture(scope='function')
188 190 def app(request, config_stub, baseapp, http_environ):
189 191 app = CustomTestApp(
190 192 baseapp,
191 193 extra_environ=http_environ)
192 194 if request.cls:
193 195 request.cls.app = app
194 196 return app
195 197
196 198
197 199 @pytest.fixture(scope='session')
198 200 def app_settings(baseapp, ini_config):
199 201 """
200 202 Settings dictionary used to create the app.
201 203
202 204 Parses the ini file and passes the result through the sanitize and apply
203 205 defaults mechanism in `rhodecode.config.middleware`.
204 206 """
205 207 return baseapp.config.get_settings()
206 208
207 209
208 210 @pytest.fixture(scope='session')
209 211 def db_connection(ini_settings):
210 212 # Initialize the database connection.
211 213 config_utils.initialize_database(ini_settings)
212 214
213 215
214 216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
215 217
216 218
217 219 def _autologin_user(app, *args):
218 220 session = login_user_session(app, *args)
219 221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
220 222 return LoginData(csrf_token, session['rhodecode_user'])
221 223
222 224
223 225 @pytest.fixture()
224 226 def autologin_user(app):
225 227 """
226 228 Utility fixture which makes sure that the admin user is logged in
227 229 """
228 230 return _autologin_user(app)
229 231
230 232
231 233 @pytest.fixture()
232 234 def autologin_regular_user(app):
233 235 """
234 236 Utility fixture which makes sure that the regular user is logged in
235 237 """
236 238 return _autologin_user(
237 239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
238 240
239 241
240 242 @pytest.fixture(scope='function')
241 243 def csrf_token(request, autologin_user):
242 244 return autologin_user.csrf_token
243 245
244 246
245 247 @pytest.fixture(scope='function')
246 248 def xhr_header(request):
247 249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
248 250
249 251
250 252 @pytest.fixture()
251 253 def real_crypto_backend(monkeypatch):
252 254 """
253 255 Switch the production crypto backend on for this test.
254 256
255 257 During the test run the crypto backend is replaced with a faster
256 258 implementation based on the MD5 algorithm.
257 259 """
258 260 monkeypatch.setattr(rhodecode, 'is_test', False)
259 261
260 262
261 263 @pytest.fixture(scope='class')
262 264 def index_location(request, baseapp):
263 265 index_location = baseapp.config.get_settings()['search.location']
264 266 if request.cls:
265 267 request.cls.index_location = index_location
266 268 return index_location
267 269
268 270
269 271 @pytest.fixture(scope='session', autouse=True)
270 272 def tests_tmp_path(request):
271 273 """
272 274 Create temporary directory to be used during the test session.
273 275 """
274 276 if not os.path.exists(TESTS_TMP_PATH):
275 277 os.makedirs(TESTS_TMP_PATH)
276 278
277 279 if not request.config.getoption('--keep-tmp-path'):
278 280 @request.addfinalizer
279 281 def remove_tmp_path():
280 282 shutil.rmtree(TESTS_TMP_PATH)
281 283
282 284 return TESTS_TMP_PATH
283 285
284 286
285 287 @pytest.fixture()
286 288 def test_repo_group(request):
287 289 """
288 290 Create a temporary repository group, and destroy it
289 291 automatically after use.
290 292 """
291 293 fixture = Fixture()
292 294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
293 295 repo_group = fixture.create_repo_group(repogroupid)
294 296
295 297 def _cleanup():
296 298 fixture.destroy_repo_group(repogroupid)
297 299
298 300 request.addfinalizer(_cleanup)
299 301 return repo_group
300 302
301 303
302 304 @pytest.fixture()
303 305 def test_user_group(request):
304 306 """
305 307 Create a temporary user group, and destroy it
306 308 automatically after use.
307 309 """
308 310 fixture = Fixture()
309 311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
310 312 user_group = fixture.create_user_group(usergroupid)
311 313
312 314 def _cleanup():
313 315 fixture.destroy_user_group(user_group)
314 316
315 317 request.addfinalizer(_cleanup)
316 318 return user_group
317 319
318 320
319 321 @pytest.fixture(scope='session')
320 322 def test_repo(request):
321 323 container = TestRepoContainer()
322 324 request.addfinalizer(container._cleanup)
323 325 return container
324 326
325 327
326 328 class TestRepoContainer(object):
327 329 """
328 330 Container for test repositories which are used read-only.
329 331
330 332 Repositories will be created on demand and re-used during the lifetime
331 333 of this object.
332 334
333 335 Usage to get the svn test repository "minimal"::
334 336
335 337 test_repo = TestRepoContainer()
336 338 repo = test_repo('minimal', 'svn')
337 339
338 340 """
339 341
340 342 dump_extractors = {
341 343 'git': utils.extract_git_repo_from_dump,
342 344 'hg': utils.extract_hg_repo_from_dump,
343 345 'svn': utils.extract_svn_repo_from_dump,
344 346 }
345 347
346 348 def __init__(self):
347 349 self._cleanup_repos = []
348 350 self._fixture = Fixture()
349 351 self._repos = {}
350 352
351 353 def __call__(self, dump_name, backend_alias, config=None):
352 354 key = (dump_name, backend_alias)
353 355 if key not in self._repos:
354 356 repo = self._create_repo(dump_name, backend_alias, config)
355 357 self._repos[key] = repo.repo_id
356 358 return Repository.get(self._repos[key])
357 359
358 360 def _create_repo(self, dump_name, backend_alias, config):
359 361 repo_name = '%s-%s' % (backend_alias, dump_name)
360 362 backend = get_backend(backend_alias)
361 363 dump_extractor = self.dump_extractors[backend_alias]
362 364 repo_path = dump_extractor(dump_name, repo_name)
363 365
364 366 vcs_repo = backend(repo_path, config=config)
365 367 repo2db_mapper({repo_name: vcs_repo})
366 368
367 369 repo = RepoModel().get_by_repo_name(repo_name)
368 370 self._cleanup_repos.append(repo_name)
369 371 return repo
370 372
371 373 def _cleanup(self):
372 374 for repo_name in reversed(self._cleanup_repos):
373 375 self._fixture.destroy_repo(repo_name)
374 376
375 377
376 378 def backend_base(request, backend_alias, baseapp, test_repo):
377 379 if backend_alias not in request.config.getoption('--backends'):
378 380 pytest.skip("Backend %s not selected." % (backend_alias, ))
379 381
380 382 utils.check_xfail_backends(request.node, backend_alias)
381 383 utils.check_skip_backends(request.node, backend_alias)
382 384
383 385 repo_name = 'vcs_test_%s' % (backend_alias, )
384 386 backend = Backend(
385 387 alias=backend_alias,
386 388 repo_name=repo_name,
387 389 test_name=request.node.name,
388 390 test_repo_container=test_repo)
389 391 request.addfinalizer(backend.cleanup)
390 392 return backend
391 393
392 394
393 395 @pytest.fixture()
394 396 def backend(request, backend_alias, baseapp, test_repo):
395 397 """
396 398 Parametrized fixture which represents a single backend implementation.
397 399
398 400 It respects the option `--backends` to focus the test run on specific
399 401 backend implementations.
400 402
401 403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
402 404 for specific backends. This is intended as a utility for incremental
403 405 development of a new backend implementation.
404 406 """
405 407 return backend_base(request, backend_alias, baseapp, test_repo)
406 408
407 409
408 410 @pytest.fixture()
409 411 def backend_git(request, baseapp, test_repo):
410 412 return backend_base(request, 'git', baseapp, test_repo)
411 413
412 414
413 415 @pytest.fixture()
414 416 def backend_hg(request, baseapp, test_repo):
415 417 return backend_base(request, 'hg', baseapp, test_repo)
416 418
417 419
418 420 @pytest.fixture()
419 421 def backend_svn(request, baseapp, test_repo):
420 422 return backend_base(request, 'svn', baseapp, test_repo)
421 423
422 424
423 425 @pytest.fixture()
424 426 def backend_random(backend_git):
425 427 """
426 428 Use this to express that your tests need "a backend".
427 429
428 430 A few of our tests need a backend, so that we can run the code. This
429 431 fixture is intended to be used for such cases. It will pick one of the
430 432 backends and run the tests.
431 433
432 434 The fixture `backend` would run the test multiple times for each
433 435 available backend which is a pure waste of time if the test is
434 436 independent of the backend type.
435 437 """
436 438 # TODO: johbo: Change this to pick a random backend
437 439 return backend_git
438 440
439 441
440 442 @pytest.fixture()
441 443 def backend_stub(backend_git):
442 444 """
443 445 Use this to express that your tests need a backend stub
444 446
445 447 TODO: mikhail: Implement real stub logic instead of returning
446 448 a git backend
447 449 """
448 450 return backend_git
449 451
450 452
451 453 @pytest.fixture()
452 454 def repo_stub(backend_stub):
453 455 """
454 456 Use this to express that your tests need a repository stub
455 457 """
456 458 return backend_stub.create_repo()
457 459
458 460
459 461 class Backend(object):
460 462 """
461 463 Represents the test configuration for one supported backend
462 464
463 465 Provides easy access to different test repositories based on
464 466 `__getitem__`. Such repositories will only be created once per test
465 467 session.
466 468 """
467 469
468 470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
469 471 _master_repo = None
470 472 _master_repo_path = ''
471 473 _commit_ids = {}
472 474
473 475 def __init__(self, alias, repo_name, test_name, test_repo_container):
474 476 self.alias = alias
475 477 self.repo_name = repo_name
476 478 self._cleanup_repos = []
477 479 self._test_name = test_name
478 480 self._test_repo_container = test_repo_container
479 481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
480 482 # Fixture will survive in the end.
481 483 self._fixture = Fixture()
482 484
483 485 def __getitem__(self, key):
484 486 return self._test_repo_container(key, self.alias)
485 487
486 488 def create_test_repo(self, key, config=None):
487 489 return self._test_repo_container(key, self.alias, config)
488 490
489 491 @property
490 492 def repo(self):
491 493 """
492 494 Returns the "current" repository. This is the vcs_test repo or the
493 495 last repo which has been created with `create_repo`.
494 496 """
495 497 from rhodecode.model.db import Repository
496 498 return Repository.get_by_repo_name(self.repo_name)
497 499
498 500 @property
499 501 def default_branch_name(self):
500 502 VcsRepository = get_backend(self.alias)
501 503 return VcsRepository.DEFAULT_BRANCH_NAME
502 504
503 505 @property
504 506 def default_head_id(self):
505 507 """
506 508 Returns the default head id of the underlying backend.
507 509
508 510 This will be the default branch name in case the backend does have a
509 511 default branch. In the other cases it will point to a valid head
510 512 which can serve as the base to create a new commit on top of it.
511 513 """
512 514 vcsrepo = self.repo.scm_instance()
513 515 head_id = (
514 516 vcsrepo.DEFAULT_BRANCH_NAME or
515 517 vcsrepo.commit_ids[-1])
516 518 return head_id
517 519
518 520 @property
519 521 def commit_ids(self):
520 522 """
521 523 Returns the list of commits for the last created repository
522 524 """
523 525 return self._commit_ids
524 526
525 527 def create_master_repo(self, commits):
526 528 """
527 529 Create a repository and remember it as a template.
528 530
529 531 This makes it easy to create derived repositories to construct
530 532 more complex scenarios for diff, compare and pull requests.
531 533
532 534 Returns a commit map which maps from commit message to raw_id.
533 535 """
534 536 self._master_repo = self.create_repo(commits=commits)
535 537 self._master_repo_path = self._master_repo.repo_full_path
536 538
537 539 return self._commit_ids
538 540
539 541 def create_repo(
540 542 self, commits=None, number_of_commits=0, heads=None,
541 543 name_suffix=u'', bare=False, **kwargs):
542 544 """
543 545 Create a repository and record it for later cleanup.
544 546
545 547 :param commits: Optional. A sequence of dict instances.
546 548 Will add a commit per entry to the new repository.
547 549 :param number_of_commits: Optional. If set to a number, this number of
548 550 commits will be added to the new repository.
549 551 :param heads: Optional. Can be set to a sequence of commit
550 552 names which shall be pulled in from the master repository.
551 553 :param name_suffix: adds special suffix to generated repo name
552 554 :param bare: set a repo as bare (no checkout)
553 555 """
554 556 self.repo_name = self._next_repo_name() + name_suffix
555 557 repo = self._fixture.create_repo(
556 558 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
557 559 self._cleanup_repos.append(repo.repo_name)
558 560
559 561 commits = commits or [
560 562 {'message': 'Commit %s of %s' % (x, self.repo_name)}
561 563 for x in range(number_of_commits)]
562 564 vcs_repo = repo.scm_instance()
563 565 vcs_repo.count()
564 566 self._add_commits_to_repo(vcs_repo, commits)
565 567 if heads:
566 568 self.pull_heads(repo, heads)
567 569
568 570 return repo
569 571
570 572 def pull_heads(self, repo, heads):
571 573 """
572 574 Make sure that repo contains all commits mentioned in `heads`
573 575 """
574 576 vcsrepo = repo.scm_instance()
575 577 vcsrepo.config.clear_section('hooks')
576 578 commit_ids = [self._commit_ids[h] for h in heads]
577 579 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
578 580
579 581 def create_fork(self):
580 582 repo_to_fork = self.repo_name
581 583 self.repo_name = self._next_repo_name()
582 584 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
583 585 self._cleanup_repos.append(self.repo_name)
584 586 return repo
585 587
586 588 def new_repo_name(self, suffix=u''):
587 589 self.repo_name = self._next_repo_name() + suffix
588 590 self._cleanup_repos.append(self.repo_name)
589 591 return self.repo_name
590 592
591 593 def _next_repo_name(self):
592 594 return u"%s_%s" % (
593 595 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
594 596
595 597 def ensure_file(self, filename, content='Test content\n'):
596 598 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
597 599 commits = [
598 600 {'added': [
599 601 FileNode(filename, content=content),
600 602 ]},
601 603 ]
602 604 self._add_commits_to_repo(self.repo.scm_instance(), commits)
603 605
604 606 def enable_downloads(self):
605 607 repo = self.repo
606 608 repo.enable_downloads = True
607 609 Session().add(repo)
608 610 Session().commit()
609 611
610 612 def cleanup(self):
611 613 for repo_name in reversed(self._cleanup_repos):
612 614 self._fixture.destroy_repo(repo_name)
613 615
614 616 def _add_commits_to_repo(self, repo, commits):
615 617 commit_ids = _add_commits_to_repo(repo, commits)
616 618 if not commit_ids:
617 619 return
618 620 self._commit_ids = commit_ids
619 621
620 622 # Creating refs for Git to allow fetching them from remote repository
621 623 if self.alias == 'git':
622 624 refs = {}
623 625 for message in self._commit_ids:
624 626 # TODO: mikhail: do more special chars replacements
625 627 ref_name = 'refs/test-refs/{}'.format(
626 628 message.replace(' ', ''))
627 629 refs[ref_name] = self._commit_ids[message]
628 630 self._create_refs(repo, refs)
629 631
630 632 def _create_refs(self, repo, refs):
631 633 for ref_name in refs:
632 634 repo.set_refs(ref_name, refs[ref_name])
633 635
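
For orientation, a sketch of the commit-description format that Backend.create_repo() (and the _add_commits_to_repo() helper further down) consumes. File names, contents and messages are illustrative; 'parents' entries refer to earlier commit messages, exactly as _add_commits_to_repo() resolves them.

    # Illustrative commit descriptions only.
    commits = [
        {'message': 'add readme',
         'added': [FileNode('README.rst', content='hello\n')]},
        {'message': 'update readme',
         'changed': [FileNode('README.rst', content='hello world\n')],
         'parents': ['add readme']},
    ]

    def test_with_prepared_repo_example(backend):
        repo = backend.create_repo(commits=commits)
        # commit_ids maps each commit message to its raw_id
        assert len(backend.commit_ids) == 2
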
634 636
635 637 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
636 638 if backend_alias not in request.config.getoption('--backends'):
637 639 pytest.skip("Backend %s not selected." % (backend_alias, ))
638 640
639 641 utils.check_xfail_backends(request.node, backend_alias)
640 642 utils.check_skip_backends(request.node, backend_alias)
641 643
642 644 repo_name = 'vcs_test_%s' % (backend_alias, )
643 645 repo_path = os.path.join(tests_tmp_path, repo_name)
644 646 backend = VcsBackend(
645 647 alias=backend_alias,
646 648 repo_path=repo_path,
647 649 test_name=request.node.name,
648 650 test_repo_container=test_repo)
649 651 request.addfinalizer(backend.cleanup)
650 652 return backend
651 653
652 654
653 655 @pytest.fixture()
654 656 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
655 657 """
656 658 Parametrized fixture which represents a single vcs backend implementation.
657 659
658 660 See the fixture `backend` for more details. This one implements the same
659 661 concept, but on vcs level. So it does not provide model instances etc.
660 662
661 663 Parameters are generated dynamically, see :func:`pytest_generate_tests`
662 664 for how this works.
663 665 """
664 666 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
665 667
666 668
667 669 @pytest.fixture()
668 670 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
669 671 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
670 672
671 673
672 674 @pytest.fixture()
673 675 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
674 676 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
675 677
676 678
677 679 @pytest.fixture()
678 680 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
679 681 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
680 682
681 683
682 684 @pytest.fixture()
683 685 def vcsbackend_stub(vcsbackend_git):
684 686 """
685 687 Use this to express that your test just needs a stub of a vcsbackend.
686 688
687 689 Plan is to eventually implement an in-memory stub to speed tests up.
688 690 """
689 691 return vcsbackend_git
690 692
691 693
692 694 class VcsBackend(object):
693 695 """
694 696 Represents the test configuration for one supported vcs backend.
695 697 """
696 698
697 699 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
698 700
699 701 def __init__(self, alias, repo_path, test_name, test_repo_container):
700 702 self.alias = alias
701 703 self._repo_path = repo_path
702 704 self._cleanup_repos = []
703 705 self._test_name = test_name
704 706 self._test_repo_container = test_repo_container
705 707
706 708 def __getitem__(self, key):
707 709 return self._test_repo_container(key, self.alias).scm_instance()
708 710
709 711 @property
710 712 def repo(self):
711 713 """
712 714 Returns the "current" repository. This is the vcs_test repo or the last
713 715 repo which has been created.
714 716 """
715 717 Repository = get_backend(self.alias)
716 718 return Repository(self._repo_path)
717 719
718 720 @property
719 721 def backend(self):
720 722 """
721 723 Returns the backend implementation class.
722 724 """
723 725 return get_backend(self.alias)
724 726
725 727 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
726 728 bare=False):
727 729 repo_name = self._next_repo_name()
728 730 self._repo_path = get_new_dir(repo_name)
729 731 repo_class = get_backend(self.alias)
730 732 src_url = None
731 733 if _clone_repo:
732 734 src_url = _clone_repo.path
733 735 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
734 736 self._cleanup_repos.append(repo)
735 737
736 738 commits = commits or [
737 739 {'message': 'Commit %s of %s' % (x, repo_name)}
738 740 for x in range(number_of_commits)]
739 741 _add_commits_to_repo(repo, commits)
740 742 return repo
741 743
742 744 def clone_repo(self, repo):
743 745 return self.create_repo(_clone_repo=repo)
744 746
745 747 def cleanup(self):
746 748 for repo in self._cleanup_repos:
747 749 shutil.rmtree(repo.path)
748 750
749 751 def new_repo_path(self):
750 752 repo_name = self._next_repo_name()
751 753 self._repo_path = get_new_dir(repo_name)
752 754 return self._repo_path
753 755
754 756 def _next_repo_name(self):
755 757 return "%s_%s" % (
756 758 self.invalid_repo_name.sub('_', self._test_name),
757 759 len(self._cleanup_repos))
758 760
759 761 def add_file(self, repo, filename, content='Test content\n'):
760 762 imc = repo.in_memory_commit
761 763 imc.add(FileNode(filename, content=content))
762 764 imc.commit(
763 765 message=u'Automatic commit from vcsbackend fixture',
764 766 author=u'Automatic <automatic@rhodecode.com>')
765 767
766 768 def ensure_file(self, filename, content='Test content\n'):
767 769 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
768 770 self.add_file(self.repo, filename, content)
769 771
770 772
771 773 def _add_commits_to_repo(vcs_repo, commits):
772 774 commit_ids = {}
773 775 if not commits:
774 776 return commit_ids
775 777
776 778 imc = vcs_repo.in_memory_commit
777 779 commit = None
778 780
779 781 for idx, commit in enumerate(commits):
780 782 message = str(commit.get('message', 'Commit %s' % idx))
781 783
782 784 for node in commit.get('added', []):
783 785 imc.add(FileNode(node.path, content=node.content))
784 786 for node in commit.get('changed', []):
785 787 imc.change(FileNode(node.path, content=node.content))
786 788 for node in commit.get('removed', []):
787 789 imc.remove(FileNode(node.path))
788 790
789 791 parents = [
790 792 vcs_repo.get_commit(commit_id=commit_ids[p])
791 793 for p in commit.get('parents', [])]
792 794
793 795 operations = ('added', 'changed', 'removed')
794 796 if not any((commit.get(o) for o in operations)):
795 797 imc.add(FileNode('file_%s' % idx, content=message))
796 798
797 799 commit = imc.commit(
798 800 message=message,
799 801 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
800 802 date=commit.get('date'),
801 803 branch=commit.get('branch'),
802 804 parents=parents)
803 805
804 806 commit_ids[commit.message] = commit.raw_id
805 807
806 808 return commit_ids
807 809
808 810
809 811 @pytest.fixture()
810 812 def reposerver(request):
811 813 """
812 814 Allows serving a backend repository
813 815 """
814 816
815 817 repo_server = RepoServer()
816 818 request.addfinalizer(repo_server.cleanup)
817 819 return repo_server
818 820
819 821
820 822 class RepoServer(object):
821 823 """
822 824 Utility to serve a local repository for the duration of a test case.
823 825
824 826 Supports only Subversion so far.
825 827 """
826 828
827 829 url = None
828 830
829 831 def __init__(self):
830 832 self._cleanup_servers = []
831 833
832 834 def serve(self, vcsrepo):
833 835 if vcsrepo.alias != 'svn':
834 836 raise TypeError("Backend %s not supported" % vcsrepo.alias)
835 837
836 838 proc = subprocess.Popen(
837 839 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
838 840 '--root', vcsrepo.path])
839 841 self._cleanup_servers.append(proc)
840 842 self.url = 'svn://localhost'
841 843
842 844 def cleanup(self):
843 845 for proc in self._cleanup_servers:
844 846 proc.terminate()
845 847
846 848
847 849 @pytest.fixture()
848 850 def pr_util(backend, request, config_stub):
849 851 """
850 852 Utility for tests of models and for functional tests around pull requests.
851 853
852 854 It gives an instance of :class:`PRTestUtility` which provides various
853 855 utility methods around one pull request.
854 856
855 857 This fixture uses `backend` and inherits its parameterization.
856 858 """
857 859
858 860 util = PRTestUtility(backend)
859 861 request.addfinalizer(util.cleanup)
860 862
861 863 return util
862 864
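
A purely illustrative sketch of a test driving the utility returned by this fixture; it relies only on methods defined on PRTestUtility below, and the assertions mirror guarantees the utility itself makes.

    def test_pull_request_gets_new_commit_example(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True)
        # the default pull request is created with a single revision ('c2')
        assert len(pull_request.revisions) == 1

        new_commit_id = pr_util.add_one_commit()
        assert new_commit_id in pull_request.revisions
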
863 865
864 866 class PRTestUtility(object):
865 867
866 868 pull_request = None
867 869 pull_request_id = None
868 870 mergeable_patcher = None
869 871 mergeable_mock = None
870 872 notification_patcher = None
871 873
872 874 def __init__(self, backend):
873 875 self.backend = backend
874 876
875 877 def create_pull_request(
876 878 self, commits=None, target_head=None, source_head=None,
877 879 revisions=None, approved=False, author=None, mergeable=False,
878 880 enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
879 881 title=u"Test", description=u"Description"):
880 882 self.set_mergeable(mergeable)
881 883 if not enable_notifications:
882 884 # mock notification side effect
883 885 self.notification_patcher = mock.patch(
884 886 'rhodecode.model.notification.NotificationModel.create')
885 887 self.notification_patcher.start()
886 888
887 889 if not self.pull_request:
888 890 if not commits:
889 891 commits = [
890 892 {'message': 'c1'},
891 893 {'message': 'c2'},
892 894 {'message': 'c3'},
893 895 ]
894 896 target_head = 'c1'
895 897 source_head = 'c2'
896 898 revisions = ['c2']
897 899
898 900 self.commit_ids = self.backend.create_master_repo(commits)
899 901 self.target_repository = self.backend.create_repo(
900 902 heads=[target_head], name_suffix=name_suffix)
901 903 self.source_repository = self.backend.create_repo(
902 904 heads=[source_head], name_suffix=name_suffix)
903 905 self.author = author or UserModel().get_by_username(
904 906 TEST_USER_ADMIN_LOGIN)
905 907
906 908 model = PullRequestModel()
907 909 self.create_parameters = {
908 910 'created_by': self.author,
909 911 'source_repo': self.source_repository.repo_name,
910 912 'source_ref': self._default_branch_reference(source_head),
911 913 'target_repo': self.target_repository.repo_name,
912 914 'target_ref': self._default_branch_reference(target_head),
913 915 'revisions': [self.commit_ids[r] for r in revisions],
914 916 'reviewers': reviewers or self._get_reviewers(),
915 917 'observers': observers or self._get_observers(),
916 918 'title': title,
917 919 'description': description,
918 920 }
919 921 self.pull_request = model.create(**self.create_parameters)
920 922 assert model.get_versions(self.pull_request) == []
921 923
922 924 self.pull_request_id = self.pull_request.pull_request_id
923 925
924 926 if approved:
925 927 self.approve()
926 928
927 929 Session().add(self.pull_request)
928 930 Session().commit()
929 931
930 932 return self.pull_request
931 933
932 934 def approve(self):
933 935 self.create_status_votes(
934 936 ChangesetStatus.STATUS_APPROVED,
935 937 *self.pull_request.reviewers)
936 938
937 939 def close(self):
938 940 PullRequestModel().close_pull_request(self.pull_request, self.author)
939 941
940 942 def _default_branch_reference(self, commit_message):
941 943 reference = '%s:%s:%s' % (
942 944 'branch',
943 945 self.backend.default_branch_name,
944 946 self.commit_ids[commit_message])
945 947 return reference
946 948
947 949 def _get_reviewers(self):
948 950 role = PullRequestReviewers.ROLE_REVIEWER
949 951 return [
950 952 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
951 953 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
952 954 ]
953 955
954 956 def _get_observers(self):
955 957 return [
956 958
957 959 ]
958 960
959 961 def update_source_repository(self, head=None):
960 962 heads = [head or 'c3']
961 963 self.backend.pull_heads(self.source_repository, heads=heads)
962 964
963 965 def add_one_commit(self, head=None):
964 966 self.update_source_repository(head=head)
965 967 old_commit_ids = set(self.pull_request.revisions)
966 968 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
967 969 commit_ids = set(self.pull_request.revisions)
968 970 new_commit_ids = commit_ids - old_commit_ids
969 971 assert len(new_commit_ids) == 1
970 972 return new_commit_ids.pop()
971 973
972 974 def remove_one_commit(self):
973 975 assert len(self.pull_request.revisions) == 2
974 976 source_vcs = self.source_repository.scm_instance()
975 977 removed_commit_id = source_vcs.commit_ids[-1]
976 978
977 979 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
978 980 # remove the if once that's sorted out.
979 981 if self.backend.alias == "git":
980 982 kwargs = {'branch_name': self.backend.default_branch_name}
981 983 else:
982 984 kwargs = {}
983 985 source_vcs.strip(removed_commit_id, **kwargs)
984 986
985 987 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
986 988 assert len(self.pull_request.revisions) == 1
987 989 return removed_commit_id
988 990
989 991 def create_comment(self, linked_to=None):
990 992 comment = CommentsModel().create(
991 993 text=u"Test comment",
992 994 repo=self.target_repository.repo_name,
993 995 user=self.author,
994 996 pull_request=self.pull_request)
995 997 assert comment.pull_request_version_id is None
996 998
997 999 if linked_to:
998 1000 PullRequestModel()._link_comments_to_version(linked_to)
999 1001
1000 1002 return comment
1001 1003
1002 1004 def create_inline_comment(
1003 1005 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1004 1006 comment = CommentsModel().create(
1005 1007 text=u"Test comment",
1006 1008 repo=self.target_repository.repo_name,
1007 1009 user=self.author,
1008 1010 line_no=line_no,
1009 1011 f_path=file_path,
1010 1012 pull_request=self.pull_request)
1011 1013 assert comment.pull_request_version_id is None
1012 1014
1013 1015 if linked_to:
1014 1016 PullRequestModel()._link_comments_to_version(linked_to)
1015 1017
1016 1018 return comment
1017 1019
1018 1020 def create_version_of_pull_request(self):
1019 1021 pull_request = self.create_pull_request()
1020 1022 version = PullRequestModel()._create_version_from_snapshot(
1021 1023 pull_request)
1022 1024 return version
1023 1025
1024 1026 def create_status_votes(self, status, *reviewers):
1025 1027 for reviewer in reviewers:
1026 1028 ChangesetStatusModel().set_status(
1027 1029 repo=self.pull_request.target_repo,
1028 1030 status=status,
1029 1031 user=reviewer.user_id,
1030 1032 pull_request=self.pull_request)
1031 1033
1032 1034 def set_mergeable(self, value):
1033 1035 if not self.mergeable_patcher:
1034 1036 self.mergeable_patcher = mock.patch.object(
1035 1037 VcsSettingsModel, 'get_general_settings')
1036 1038 self.mergeable_mock = self.mergeable_patcher.start()
1037 1039 self.mergeable_mock.return_value = {
1038 1040 'rhodecode_pr_merge_enabled': value}
1039 1041
1040 1042 def cleanup(self):
1041 1043 # In case the source repository is already cleaned up, the pull
1042 1044 # request will already be deleted.
1043 1045 pull_request = PullRequest().get(self.pull_request_id)
1044 1046 if pull_request:
1045 1047 PullRequestModel().delete(pull_request, pull_request.author)
1046 1048 Session().commit()
1047 1049
1048 1050 if self.notification_patcher:
1049 1051 self.notification_patcher.stop()
1050 1052
1051 1053 if self.mergeable_patcher:
1052 1054 self.mergeable_patcher.stop()
1053 1055
1054 1056
1055 1057 @pytest.fixture()
1056 1058 def user_admin(baseapp):
1057 1059 """
1058 1060 Provides the default admin test user as an instance of `db.User`.
1059 1061 """
1060 1062 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1061 1063 return user
1062 1064
1063 1065
1064 1066 @pytest.fixture()
1065 1067 def user_regular(baseapp):
1066 1068 """
1067 1069 Provides the default regular test user as an instance of `db.User`.
1068 1070 """
1069 1071 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1070 1072 return user
1071 1073
1072 1074
1073 1075 @pytest.fixture()
1074 1076 def user_util(request, db_connection):
1075 1077 """
1076 1078 Provides a wired instance of `UserUtility` with integrated cleanup.
1077 1079 """
1078 1080 utility = UserUtility(test_name=request.node.name)
1079 1081 request.addfinalizer(utility.cleanup)
1080 1082 return utility
1081 1083
1082 1084
1083 1085 # TODO: johbo: Split this up into utilities per domain or something similar
1084 1086 class UserUtility(object):
1085 1087
1086 1088 def __init__(self, test_name="test"):
1087 1089 self._test_name = self._sanitize_name(test_name)
1088 1090 self.fixture = Fixture()
1089 1091 self.repo_group_ids = []
1090 1092 self.repos_ids = []
1091 1093 self.user_ids = []
1092 1094 self.user_group_ids = []
1093 1095 self.user_repo_permission_ids = []
1094 1096 self.user_group_repo_permission_ids = []
1095 1097 self.user_repo_group_permission_ids = []
1096 1098 self.user_group_repo_group_permission_ids = []
1097 1099 self.user_user_group_permission_ids = []
1098 1100 self.user_group_user_group_permission_ids = []
1099 1101 self.user_permissions = []
1100 1102
1101 1103 def _sanitize_name(self, name):
1102 1104 for char in ['[', ']']:
1103 1105 name = name.replace(char, '_')
1104 1106 return name
1105 1107
1106 1108 def create_repo_group(
1107 1109 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1108 1110 group_name = "{prefix}_repogroup_{count}".format(
1109 1111 prefix=self._test_name,
1110 1112 count=len(self.repo_group_ids))
1111 1113 repo_group = self.fixture.create_repo_group(
1112 1114 group_name, cur_user=owner)
1113 1115 if auto_cleanup:
1114 1116 self.repo_group_ids.append(repo_group.group_id)
1115 1117 return repo_group
1116 1118
1117 1119 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1118 1120 auto_cleanup=True, repo_type='hg', bare=False):
1119 1121 repo_name = "{prefix}_repository_{count}".format(
1120 1122 prefix=self._test_name,
1121 1123 count=len(self.repos_ids))
1122 1124
1123 1125 repository = self.fixture.create_repo(
1124 1126 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1125 1127 if auto_cleanup:
1126 1128 self.repos_ids.append(repository.repo_id)
1127 1129 return repository
1128 1130
1129 1131 def create_user(self, auto_cleanup=True, **kwargs):
1130 1132 user_name = "{prefix}_user_{count}".format(
1131 1133 prefix=self._test_name,
1132 1134 count=len(self.user_ids))
1133 1135 user = self.fixture.create_user(user_name, **kwargs)
1134 1136 if auto_cleanup:
1135 1137 self.user_ids.append(user.user_id)
1136 1138 return user
1137 1139
1138 1140 def create_additional_user_email(self, user, email):
1139 1141 uem = self.fixture.create_additional_user_email(user=user, email=email)
1140 1142 return uem
1141 1143
1142 1144 def create_user_with_group(self):
1143 1145 user = self.create_user()
1144 1146 user_group = self.create_user_group(members=[user])
1145 1147 return user, user_group
1146 1148
1147 1149 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1148 1150 auto_cleanup=True, **kwargs):
1149 1151 group_name = "{prefix}_usergroup_{count}".format(
1150 1152 prefix=self._test_name,
1151 1153 count=len(self.user_group_ids))
1152 1154 user_group = self.fixture.create_user_group(
1153 1155 group_name, cur_user=owner, **kwargs)
1154 1156
1155 1157 if auto_cleanup:
1156 1158 self.user_group_ids.append(user_group.users_group_id)
1157 1159 if members:
1158 1160 for user in members:
1159 1161 UserGroupModel().add_user_to_group(user_group, user)
1160 1162 return user_group
1161 1163
1162 1164 def grant_user_permission(self, user_name, permission_name):
1163 1165 self.inherit_default_user_permissions(user_name, False)
1164 1166 self.user_permissions.append((user_name, permission_name))
1165 1167
1166 1168 def grant_user_permission_to_repo_group(
1167 1169 self, repo_group, user, permission_name):
1168 1170 permission = RepoGroupModel().grant_user_permission(
1169 1171 repo_group, user, permission_name)
1170 1172 self.user_repo_group_permission_ids.append(
1171 1173 (repo_group.group_id, user.user_id))
1172 1174 return permission
1173 1175
1174 1176 def grant_user_group_permission_to_repo_group(
1175 1177 self, repo_group, user_group, permission_name):
1176 1178 permission = RepoGroupModel().grant_user_group_permission(
1177 1179 repo_group, user_group, permission_name)
1178 1180 self.user_group_repo_group_permission_ids.append(
1179 1181 (repo_group.group_id, user_group.users_group_id))
1180 1182 return permission
1181 1183
1182 1184 def grant_user_permission_to_repo(
1183 1185 self, repo, user, permission_name):
1184 1186 permission = RepoModel().grant_user_permission(
1185 1187 repo, user, permission_name)
1186 1188 self.user_repo_permission_ids.append(
1187 1189 (repo.repo_id, user.user_id))
1188 1190 return permission
1189 1191
1190 1192 def grant_user_group_permission_to_repo(
1191 1193 self, repo, user_group, permission_name):
1192 1194 permission = RepoModel().grant_user_group_permission(
1193 1195 repo, user_group, permission_name)
1194 1196 self.user_group_repo_permission_ids.append(
1195 1197 (repo.repo_id, user_group.users_group_id))
1196 1198 return permission
1197 1199
1198 1200 def grant_user_permission_to_user_group(
1199 1201 self, target_user_group, user, permission_name):
1200 1202 permission = UserGroupModel().grant_user_permission(
1201 1203 target_user_group, user, permission_name)
1202 1204 self.user_user_group_permission_ids.append(
1203 1205 (target_user_group.users_group_id, user.user_id))
1204 1206 return permission
1205 1207
1206 1208 def grant_user_group_permission_to_user_group(
1207 1209 self, target_user_group, user_group, permission_name):
1208 1210 permission = UserGroupModel().grant_user_group_permission(
1209 1211 target_user_group, user_group, permission_name)
1210 1212 self.user_group_user_group_permission_ids.append(
1211 1213 (target_user_group.users_group_id, user_group.users_group_id))
1212 1214 return permission
1213 1215
1214 1216 def revoke_user_permission(self, user_name, permission_name):
1215 1217 self.inherit_default_user_permissions(user_name, True)
1216 1218 UserModel().revoke_perm(user_name, permission_name)
1217 1219
1218 1220 def inherit_default_user_permissions(self, user_name, value):
1219 1221 user = UserModel().get_by_username(user_name)
1220 1222 user.inherit_default_permissions = value
1221 1223 Session().add(user)
1222 1224 Session().commit()
1223 1225
1224 1226 def cleanup(self):
1225 1227 self._cleanup_permissions()
1226 1228 self._cleanup_repos()
1227 1229 self._cleanup_repo_groups()
1228 1230 self._cleanup_user_groups()
1229 1231 self._cleanup_users()
1230 1232
1231 1233 def _cleanup_permissions(self):
1232 1234 if self.user_permissions:
1233 1235 for user_name, permission_name in self.user_permissions:
1234 1236 self.revoke_user_permission(user_name, permission_name)
1235 1237
1236 1238 for permission in self.user_repo_permission_ids:
1237 1239 RepoModel().revoke_user_permission(*permission)
1238 1240
1239 1241 for permission in self.user_group_repo_permission_ids:
1240 1242 RepoModel().revoke_user_group_permission(*permission)
1241 1243
1242 1244 for permission in self.user_repo_group_permission_ids:
1243 1245 RepoGroupModel().revoke_user_permission(*permission)
1244 1246
1245 1247 for permission in self.user_group_repo_group_permission_ids:
1246 1248 RepoGroupModel().revoke_user_group_permission(*permission)
1247 1249
1248 1250 for permission in self.user_user_group_permission_ids:
1249 1251 UserGroupModel().revoke_user_permission(*permission)
1250 1252
1251 1253 for permission in self.user_group_user_group_permission_ids:
1252 1254 UserGroupModel().revoke_user_group_permission(*permission)
1253 1255
1254 1256 def _cleanup_repo_groups(self):
1255 1257 def _repo_group_compare(first_group_id, second_group_id):
1256 1258 """
1257 1259 Gives higher priority to the groups with the deepest paths, so child groups are destroyed before their parents
1258 1260 """
1259 1261 first_group = RepoGroup.get(first_group_id)
1260 1262 second_group = RepoGroup.get(second_group_id)
1261 1263 first_group_parts = (
1262 1264 len(first_group.group_name.split('/')) if first_group else 0)
1263 1265 second_group_parts = (
1264 1266 len(second_group.group_name.split('/')) if second_group else 0)
1265 1267 return cmp(second_group_parts, first_group_parts)
1266 1268
1267 1269 sorted_repo_group_ids = sorted(
1268 self.repo_group_ids, cmp=_repo_group_compare)
1270 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1269 1271 for repo_group_id in sorted_repo_group_ids:
1270 1272 self.fixture.destroy_repo_group(repo_group_id)
1271 1273
1272 1274 def _cleanup_repos(self):
1273 1275 sorted_repos_ids = sorted(self.repos_ids)
1274 1276 for repo_id in sorted_repos_ids:
1275 1277 self.fixture.destroy_repo(repo_id)
1276 1278
1277 1279 def _cleanup_user_groups(self):
1278 1280 def _user_group_compare(first_group_id, second_group_id):
1279 1281 """
1280 1282 Gives higher priority to the groups with the deepest paths, so child groups are destroyed before their parents
1281 1283 """
1282 1284 first_group = UserGroup.get(first_group_id)
1283 1285 second_group = UserGroup.get(second_group_id)
1284 1286 first_group_parts = (
1285 1287 len(first_group.users_group_name.split('/'))
1286 1288 if first_group else 0)
1287 1289 second_group_parts = (
1288 1290 len(second_group.users_group_name.split('/'))
1289 1291 if second_group else 0)
1290 1292 return cmp(second_group_parts, first_group_parts)
1291 1293
1292 1294 sorted_user_group_ids = sorted(
1293 self.user_group_ids, cmp=_user_group_compare)
1295 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1294 1296 for user_group_id in sorted_user_group_ids:
1295 1297 self.fixture.destroy_user_group(user_group_id)
1296 1298
1297 1299 def _cleanup_users(self):
1298 1300 for user_id in self.user_ids:
1299 1301 self.fixture.destroy_user(user_id)
1300 1302
1301 1303
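The restructured cleanup above is the core Python 3 fix in this file: `sorted()` no longer accepts `cmp=`, so the old comparators are wrapped with `functools.cmp_to_key`. A minimal standalone sketch of the same deepest-path-first ordering (the names below are illustrative, not part of conftest):

    import functools

    def _by_path_depth(first, second):
        # old cmp-style comparator: more path segments should sort first
        first_parts = len(first.split('/'))
        second_parts = len(second.split('/'))
        return (second_parts > first_parts) - (second_parts < first_parts)

    groups = ['parent', 'parent/child', 'zombie']
    assert sorted(groups, key=functools.cmp_to_key(_by_path_depth)) == [
        'parent/child', 'parent', 'zombie']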
1302 1304 @pytest.fixture(scope='session')
1303 1305 def testrun():
1304 1306 return {
1305 1307 'uuid': uuid.uuid4(),
1306 1308 'start': datetime.datetime.utcnow().isoformat(),
1307 1309 'timestamp': int(time.time()),
1308 1310 }
1309 1311
1310 1312
1311 1313 class AppenlightClient(object):
1312 1314
1313 1315 url_template = '{url}?protocol_version=0.5'
1314 1316
1315 1317 def __init__(
1316 1318 self, url, api_key, add_server=True, add_timestamp=True,
1317 1319 namespace=None, request=None, testrun=None):
1318 1320 self.url = self.url_template.format(url=url)
1319 1321 self.api_key = api_key
1320 1322 self.add_server = add_server
1321 1323 self.add_timestamp = add_timestamp
1322 1324 self.namespace = namespace
1323 1325 self.request = request
1324 1326 self.server = socket.getfqdn(socket.gethostname())
1325 1327 self.tags_before = {}
1326 1328 self.tags_after = {}
1327 1329 self.stats = []
1328 1330 self.testrun = testrun or {}
1329 1331
1330 1332 def tag_before(self, tag, value):
1331 1333 self.tags_before[tag] = value
1332 1334
1333 1335 def tag_after(self, tag, value):
1334 1336 self.tags_after[tag] = value
1335 1337
1336 1338 def collect(self, data):
1337 1339 if self.add_server:
1338 1340 data.setdefault('server', self.server)
1339 1341 if self.add_timestamp:
1340 1342 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1341 1343 if self.namespace:
1342 1344 data.setdefault('namespace', self.namespace)
1343 1345 if self.request:
1344 1346 data.setdefault('request', self.request)
1345 1347 self.stats.append(data)
1346 1348
1347 1349 def send_stats(self):
1348 1350 tags = [
1349 1351 ('testrun', self.request),
1350 1352 ('testrun.start', self.testrun['start']),
1351 1353 ('testrun.timestamp', self.testrun['timestamp']),
1352 1354 ('test', self.namespace),
1353 1355 ]
1354 1356 for key, value in self.tags_before.items():
1355 1357 tags.append((key + '.before', value))
1356 1358 try:
1357 1359 delta = self.tags_after[key] - value
1358 1360 tags.append((key + '.delta', delta))
1359 1361 except Exception:
1360 1362 pass
1361 1363 for key, value in self.tags_after.items():
1362 1364 tags.append((key + '.after', value))
1363 1365 self.collect({
1364 1366 'message': "Collected tags",
1365 1367 'tags': tags,
1366 1368 })
1367 1369
1368 1370 response = requests.post(
1369 1371 self.url,
1370 1372 headers={
1371 1373 'X-appenlight-api-key': self.api_key},
1372 1374 json=self.stats,
1373 1375 )
1374 1376
1375 1377 if not response.status_code == 200:
1376 1378 pprint.pprint(self.stats)
1377 1379 print(response.headers)
1378 1380 print(response.text)
1379 1381 raise Exception('Sending to appenlight failed')
1380 1382
1381 1383
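AppenlightClient is only driven by the performance tooling; a hedged usage sketch based solely on the methods defined above (the URL and API key are placeholders):

    client = AppenlightClient(
        url='https://appenlight.example.com/api/general',  # placeholder endpoint
        api_key='PLACEHOLDER-KEY',
        namespace='vcs_operations.test_push',
        request='testrun-0001',
        testrun={'start': '2020-01-01T00:00:00', 'timestamp': 1577836800})
    client.tag_before('commits', 0)
    client.tag_after('commits', 10)   # send_stats() also reports the delta
    client.collect({'message': 'custom stat'})
    client.send_stats()               # POSTs self.stats as JSON; raises on non-200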
1382 1384 @pytest.fixture()
1383 1385 def gist_util(request, db_connection):
1384 1386 """
1385 1387 Provides a wired instance of `GistUtility` with integrated cleanup.
1386 1388 """
1387 1389 utility = GistUtility()
1388 1390 request.addfinalizer(utility.cleanup)
1389 1391 return utility
1390 1392
1391 1393
1392 1394 class GistUtility(object):
1393 1395 def __init__(self):
1394 1396 self.fixture = Fixture()
1395 1397 self.gist_ids = []
1396 1398
1397 1399 def create_gist(self, **kwargs):
1398 1400 gist = self.fixture.create_gist(**kwargs)
1399 1401 self.gist_ids.append(gist.gist_id)
1400 1402 return gist
1401 1403
1402 1404 def cleanup(self):
1403 1405 for id_ in self.gist_ids:
1404 1406 self.fixture.destroy_gists(str(id_))
1405 1407
1406 1408
1407 1409 @pytest.fixture()
1408 1410 def enabled_backends(request):
1409 1411 backends = request.config.option.backends
1410 1412 return backends[:]
1411 1413
1412 1414
1413 1415 @pytest.fixture()
1414 1416 def settings_util(request, db_connection):
1415 1417 """
1416 1418 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1417 1419 """
1418 1420 utility = SettingsUtility()
1419 1421 request.addfinalizer(utility.cleanup)
1420 1422 return utility
1421 1423
1422 1424
1423 1425 class SettingsUtility(object):
1424 1426 def __init__(self):
1425 1427 self.rhodecode_ui_ids = []
1426 1428 self.rhodecode_setting_ids = []
1427 1429 self.repo_rhodecode_ui_ids = []
1428 1430 self.repo_rhodecode_setting_ids = []
1429 1431
1430 1432 def create_repo_rhodecode_ui(
1431 1433 self, repo, section, value, key=None, active=True, cleanup=True):
1432 key = key or hashlib.sha1(
1433 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1434 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1434 1435
1435 1436 setting = RepoRhodeCodeUi()
1436 1437 setting.repository_id = repo.repo_id
1437 1438 setting.ui_section = section
1438 1439 setting.ui_value = value
1439 1440 setting.ui_key = key
1440 1441 setting.ui_active = active
1441 1442 Session().add(setting)
1442 1443 Session().commit()
1443 1444
1444 1445 if cleanup:
1445 1446 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1446 1447 return setting
1447 1448
1448 1449 def create_rhodecode_ui(
1449 1450 self, section, value, key=None, active=True, cleanup=True):
1450 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1451 key = key or sha1_safe(f'{section}{value}')
1451 1452
1452 1453 setting = RhodeCodeUi()
1453 1454 setting.ui_section = section
1454 1455 setting.ui_value = value
1455 1456 setting.ui_key = key
1456 1457 setting.ui_active = active
1457 1458 Session().add(setting)
1458 1459 Session().commit()
1459 1460
1460 1461 if cleanup:
1461 1462 self.rhodecode_ui_ids.append(setting.ui_id)
1462 1463 return setting
1463 1464
1464 1465 def create_repo_rhodecode_setting(
1465 1466 self, repo, name, value, type_, cleanup=True):
1466 1467 setting = RepoRhodeCodeSetting(
1467 1468 repo.repo_id, key=name, val=value, type=type_)
1468 1469 Session().add(setting)
1469 1470 Session().commit()
1470 1471
1471 1472 if cleanup:
1472 1473 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1473 1474 return setting
1474 1475
1475 1476 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1476 1477 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1477 1478 Session().add(setting)
1478 1479 Session().commit()
1479 1480
1480 1481 if cleanup:
1481 1482 self.rhodecode_setting_ids.append(setting.app_settings_id)
1482 1483
1483 1484 return setting
1484 1485
1485 1486 def cleanup(self):
1486 1487 for id_ in self.rhodecode_ui_ids:
1487 1488 setting = RhodeCodeUi.get(id_)
1488 1489 Session().delete(setting)
1489 1490
1490 1491 for id_ in self.rhodecode_setting_ids:
1491 1492 setting = RhodeCodeSetting.get(id_)
1492 1493 Session().delete(setting)
1493 1494
1494 1495 for id_ in self.repo_rhodecode_ui_ids:
1495 1496 setting = RepoRhodeCodeUi.get(id_)
1496 1497 Session().delete(setting)
1497 1498
1498 1499 for id_ in self.repo_rhodecode_setting_ids:
1499 1500 setting = RepoRhodeCodeSetting.get(id_)
1500 1501 Session().delete(setting)
1501 1502
1502 1503 Session().commit()
1503 1504
1504 1505
1505 1506 @pytest.fixture()
1506 1507 def no_notifications(request):
1507 1508 notification_patcher = mock.patch(
1508 1509 'rhodecode.model.notification.NotificationModel.create')
1509 1510 notification_patcher.start()
1510 1511 request.addfinalizer(notification_patcher.stop)
1511 1512
1512 1513
1513 1514 @pytest.fixture(scope='session')
1514 1515 def repeat(request):
1515 1516 """
1516 1517 The number of repetitions is based on this fixture.
1517 1518
1518 1519 Slower calls may divide it by 10 or 100. The value is chosen so that the
1519 1520 tests are not too slow in our default test suite.
1520 1521 """
1521 1522 return request.config.getoption('--repeat')
1522 1523
1523 1524
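A hypothetical consumer of this fixture (not part of the suite), scaling the loop down for slower operations as the docstring suggests:

    def test_cheap_operation_repeatedly(repeat):
        # a cheap call runs `repeat` times; slow calls would use repeat // 10 or // 100
        for _ in range(repeat):
            assert 1 + 1 == 2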
1524 1525 @pytest.fixture()
1525 1526 def rhodecode_fixtures():
1526 1527 return Fixture()
1527 1528
1528 1529
1529 1530 @pytest.fixture()
1530 1531 def context_stub():
1531 1532 """
1532 1533 Stub context object.
1533 1534 """
1534 1535 context = pyramid.testing.DummyResource()
1535 1536 return context
1536 1537
1537 1538
1538 1539 @pytest.fixture()
1539 1540 def request_stub():
1540 1541 """
1541 1542 Stub request object.
1542 1543 """
1543 1544 from rhodecode.lib.base import bootstrap_request
1544 1545 request = bootstrap_request(scheme='https')
1545 1546 return request
1546 1547
1547 1548
1548 1549 @pytest.fixture()
1549 1550 def config_stub(request, request_stub):
1550 1551 """
1551 1552 Set up pyramid.testing and return the Configurator.
1552 1553 """
1553 1554 from rhodecode.lib.base import bootstrap_config
1554 1555 config = bootstrap_config(request=request_stub)
1555 1556
1556 1557 @request.addfinalizer
1557 1558 def cleanup():
1558 1559 pyramid.testing.tearDown()
1559 1560
1560 1561 return config
1561 1562
1562 1563
1563 1564 @pytest.fixture()
1564 1565 def StubIntegrationType():
1565 1566 class _StubIntegrationType(IntegrationTypeBase):
1566 1567 """ Test integration type class """
1567 1568
1568 1569 key = 'test'
1569 1570 display_name = 'Test integration type'
1570 1571 description = 'A test integration type for testing'
1571 1572
1572 1573 @classmethod
1573 1574 def icon(cls):
1574 1575 return 'test_icon_html_image'
1575 1576
1576 1577 def __init__(self, settings):
1577 1578 super(_StubIntegrationType, self).__init__(settings)
1578 1579 self.sent_events = [] # for testing
1579 1580
1580 1581 def send_event(self, event):
1581 1582 self.sent_events.append(event)
1582 1583
1583 1584 def settings_schema(self):
1584 1585 class SettingsSchema(colander.Schema):
1585 1586 test_string_field = colander.SchemaNode(
1586 1587 colander.String(),
1587 1588 missing=colander.required,
1588 1589 title='test string field',
1589 1590 )
1590 1591 test_int_field = colander.SchemaNode(
1591 1592 colander.Int(),
1592 1593 title='some integer setting',
1593 1594 )
1594 1595 return SettingsSchema()
1595 1596
1596 1597
1597 1598 integration_type_registry.register_integration_type(_StubIntegrationType)
1598 1599 return _StubIntegrationType
1599 1600
1600 1601
1601 1602 @pytest.fixture()
1602 1603 def stub_integration_settings():
1603 1604 return {
1604 1605 'test_string_field': 'some data',
1605 1606 'test_int_field': 100,
1606 1607 }
1607 1608
1608 1609
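The stub settings are shaped to satisfy the stub schema above; a hypothetical check (not an existing test) wiring the two fixtures together, assuming colander's usual deserialize behaviour:

    def test_stub_settings_match_schema(StubIntegrationType, stub_integration_settings):
        schema = StubIntegrationType(settings={}).settings_schema()
        cleaned = schema.deserialize(stub_integration_settings)
        assert cleaned['test_int_field'] == 100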
1609 1610 @pytest.fixture()
1610 1611 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1611 1612 stub_integration_settings):
1612 1613 integration = IntegrationModel().create(
1613 1614 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1614 1615 name='test repo integration',
1615 1616 repo=repo_stub, repo_group=None, child_repos_only=None)
1616 1617
1617 1618 @request.addfinalizer
1618 1619 def cleanup():
1619 1620 IntegrationModel().delete(integration)
1620 1621
1621 1622 return integration
1622 1623
1623 1624
1624 1625 @pytest.fixture()
1625 1626 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1626 1627 stub_integration_settings):
1627 1628 integration = IntegrationModel().create(
1628 1629 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1629 1630 name='test repogroup integration',
1630 1631 repo=None, repo_group=test_repo_group, child_repos_only=True)
1631 1632
1632 1633 @request.addfinalizer
1633 1634 def cleanup():
1634 1635 IntegrationModel().delete(integration)
1635 1636
1636 1637 return integration
1637 1638
1638 1639
1639 1640 @pytest.fixture()
1640 1641 def repogroup_recursive_integration_stub(request, test_repo_group,
1641 1642 StubIntegrationType, stub_integration_settings):
1642 1643 integration = IntegrationModel().create(
1643 1644 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1644 1645 name='test recursive repogroup integration',
1645 1646 repo=None, repo_group=test_repo_group, child_repos_only=False)
1646 1647
1647 1648 @request.addfinalizer
1648 1649 def cleanup():
1649 1650 IntegrationModel().delete(integration)
1650 1651
1651 1652 return integration
1652 1653
1653 1654
1654 1655 @pytest.fixture()
1655 1656 def global_integration_stub(request, StubIntegrationType,
1656 1657 stub_integration_settings):
1657 1658 integration = IntegrationModel().create(
1658 1659 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1659 1660 name='test global integration',
1660 1661 repo=None, repo_group=None, child_repos_only=None)
1661 1662
1662 1663 @request.addfinalizer
1663 1664 def cleanup():
1664 1665 IntegrationModel().delete(integration)
1665 1666
1666 1667 return integration
1667 1668
1668 1669
1669 1670 @pytest.fixture()
1670 1671 def root_repos_integration_stub(request, StubIntegrationType,
1671 1672 stub_integration_settings):
1672 1673 integration = IntegrationModel().create(
1673 1674 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1674 1675 name='test global integration',
1675 1676 repo=None, repo_group=None, child_repos_only=True)
1676 1677
1677 1678 @request.addfinalizer
1678 1679 def cleanup():
1679 1680 IntegrationModel().delete(integration)
1680 1681
1681 1682 return integration
1682 1683
1683 1684
1684 1685 @pytest.fixture()
1685 1686 def local_dt_to_utc():
1686 1687 def _factory(dt):
1687 1688 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1688 1689 dateutil.tz.tzutc()).replace(tzinfo=None)
1689 1690 return _factory
1690 1691
1691 1692
1692 1693 @pytest.fixture()
1693 1694 def disable_anonymous_user(request, baseapp):
1694 1695 set_anonymous_access(False)
1695 1696
1696 1697 @request.addfinalizer
1697 1698 def cleanup():
1698 1699 set_anonymous_access(True)
1699 1700
1700 1701
1701 1702 @pytest.fixture(scope='module')
1702 1703 def rc_fixture(request):
1703 1704 return Fixture()
1704 1705
1705 1706
1706 1707 @pytest.fixture()
1707 1708 def repo_groups(request):
1708 1709 fixture = Fixture()
1709 1710
1710 1711 session = Session()
1711 1712 zombie_group = fixture.create_repo_group('zombie')
1712 1713 parent_group = fixture.create_repo_group('parent')
1713 1714 child_group = fixture.create_repo_group('parent/child')
1714 1715 groups_in_db = session.query(RepoGroup).all()
1715 1716 assert len(groups_in_db) == 3
1716 1717 assert child_group.group_parent_id == parent_group.group_id
1717 1718
1718 1719 @request.addfinalizer
1719 1720 def cleanup():
1720 1721 fixture.destroy_repo_group(zombie_group)
1721 1722 fixture.destroy_repo_group(child_group)
1722 1723 fixture.destroy_repo_group(parent_group)
1723 1724
1724 1725 return zombie_group, parent_group, child_group
@@ -1,51 +1,51 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24
25 25 @pytest.mark.usefixtures('autologin_user', 'app')
26 26 def test_vcs_available_returns_summary_page(app, backend):
27 27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
28 28 response = app.get(url)
29 29 assert response.status_code == 200
30 assert 'Summary' in response.body
30 assert 'Summary' in response.text
31 31
32 32
33 33 @pytest.mark.usefixtures('autologin_user', 'app')
34 34 def test_vcs_unavailable_returns_vcs_error_page(app, backend):
35 35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
36 36
37 37 # Depending on the used VCSServer protocol we have to patch a different
38 38 # RemoteRepo class to raise an exception. For the test it doesn't matter
39 39 # if http is used, it just requires the exception to be raised.
40 40 from rhodecode.lib.vcs.client_http import RemoteRepo
41 41
42 42 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
43 43
44 44 # Patch remote repo to raise an exception instead of making a RPC.
45 45 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
46 46 remote_mock.side_effect = VCSCommunicationError()
47 47
48 48 response = app.get(url, expect_errors=True)
49 49
50 50 assert response.status_code == 502
51 assert 'Could not connect to VCS Server' in response.body
51 assert 'Could not connect to VCS Server' in response.text
@@ -1,99 +1,99 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.tests.utils import CustomTestApp
22 22 from rhodecode.lib.middleware.utils import wsgi_app_caller_client
23 23
24 24 # pylint: disable=protected-access,too-many-public-methods
25 25
26 26
27 27 BASE_ENVIRON = {
28 28 'REQUEST_METHOD': 'GET',
29 29 'SERVER_NAME': 'localhost',
30 30 'SERVER_PORT': '80',
31 31 'SCRIPT_NAME': '',
32 32 'PATH_INFO': '/',
33 33 'QUERY_STRING': '',
34 34 'foo.bool_var': True,
35 35 'foo.str_var': 'True',
36 36 'wsgi.foo': True,
37 37 # Some non string values. The validator expects to get an iterable as
38 38 # value.
39 39 (42,): '42',
40 40 (True,): 'False',
41 41 }
42 42
43 43
44 44 def assert_all_values_are_str(environ):
45 45 """Checks that all values of a dict are str."""
46 46 for key, value in environ.items():
47 47 assert isinstance(value, str), (
48 48 "Value for key %s: has type %s but 'str' was expected. Value: %s" %
49 49 (key, type(value), repr(value)))
50 50
51 51
52 52 def assert_all_keys_are_str(environ):
53 53 """Checks that all keys of a dict are str."""
54 54 for key, value in environ.items():
55 55 assert isinstance(value, str), (
56 56 "Key %s: has type %s but 'str' was expected. " %
57 57 (repr(key), type(key)))
58 58
59 59
60 60 def assert_no_prefix_in_keys(environ, prefix):
61 61 """Checks that no key of the dict starts with the prefix."""
62 62 for key in environ:
63 63 assert not key.startswith(prefix), 'Key %s should not be present' % key
64 64
65 65
66 66 def test_get_environ():
67 67 clean_environ = wsgi_app_caller_client._get_clean_environ(BASE_ENVIRON)
68 68
69 69 assert len(clean_environ) == 7
70 70 assert_no_prefix_in_keys(clean_environ, 'wsgi.')
71 71 assert_all_keys_are_str(clean_environ)
72 72 assert_all_values_are_str(clean_environ)
73 73
74 74
75 75 def test_remote_app_caller():
76 76
77 77 class RemoteAppCallerMock(object):
78 78
79 79 def handle(self, environ, input_data, arg1, arg2,
80 80 arg3=None, arg4=None, arg5=None):
81 81 assert ((arg1, arg2, arg3, arg4, arg5) ==
82 82 ('a1', 'a2', 'a3', 'a4', None))
83 83 # Note: RemoteAppCaller is expected to return a tuple like the
84 84 # following one
85 85 return (['content'], '200 OK', [('Content-Type', 'text/plain')])
86 86
87 87 wrapper_app = wsgi_app_caller_client.RemoteAppCaller(
88 88 RemoteAppCallerMock(), 'a1', 'a2', arg3='a3', arg4='a4')
89 89
90 90 test_app = CustomTestApp(wrapper_app)
91 91
92 92 response = test_app.get('/path')
93 93
94 94 assert response.status == '200 OK'
95 95 assert sorted(response.headers.items()) == sorted([
96 96 ('Content-Type', 'text/plain'),
97 97 ('Content-Length', '7'),
98 98 ])
99 assert response.body == 'content'
99 assert response.text == 'content'
@@ -1,133 +1,136 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.encrypt import (
24 AESCipher, SignatureVerificationError, InvalidDecryptedValue)
25 from rhodecode.lib.encrypt2 import (Encryptor, InvalidToken)
24 AESCipher, InvalidDecryptedValue)
25 from rhodecode.lib import enc_utils
26 from rhodecode.lib.str_utils import safe_str
27 from rhodecode.lib.exceptions import SignatureVerificationError
28
29
30 @pytest.mark.parametrize(
31 "algo", ['fernet', 'aes'],
32 )
33 @pytest.mark.parametrize(
34 "key, text",
35 [
36 (b'a', 'short'),
37 (b'a' * 64, 'too long(trimmed to 32)'),
38 (b'a' * 32, 'just enough'),
39 ('ąćęćę', 'non asci'),
40 ('$asa$asa', 'special $ used'),
41 ]
42 )
43 @pytest.mark.parametrize(
44 "strict_mode", [True, False],
45 )
46 def test_common_encryption_module(algo, key, text, strict_mode):
47 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
48 decrypted = enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=strict_mode)
49 assert text == safe_str(decrypted)
50
51
52 @pytest.mark.parametrize(
53 "algo", ['fernet', 'aes'],
54 )
55 def test_encryption_with_bad_key(algo):
56 key = b'secretstring'
57 text = b'ihatemysql'
58
59 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
60 decrypted = enc_utils.decrypt_value(encrypted, algo=algo, enc_key=b'different-key', strict_mode=False)
61
62 assert decrypted[:22] == '<InvalidDecryptedValue'
63
64
65 @pytest.mark.parametrize(
66 "algo", ['fernet', 'aes'],
67 )
68 def test_encryption_with_bad_key_raises(algo):
69 key = b'secretstring'
70 text = b'ihatemysql'
71 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
72
73 with pytest.raises(SignatureVerificationError) as e:
74 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=b'different-key', strict_mode=True)
75
76 assert 'InvalidDecryptedValue' in str(e)
26 77
27 78
28 class TestEncryptModule(object):
29
30 @pytest.mark.parametrize(
31 "key, text",
32 [
33 ('a', 'short'),
34 ('a'*64, 'too long(trimmed to 32)'),
35 ('a'*32, 'just enough'),
36 ('ąćęćę', 'non asci'),
37 ('$asa$asa', 'special $ used'),
38 ]
39 )
40 def test_encryption(self, key, text):
41 enc = AESCipher(key).encrypt(text)
42 assert AESCipher(key).decrypt(enc) == text
43
44 def test_encryption_with_hmac(self):
45 key = 'secret'
46 text = 'ihatemysql'
47 enc = AESCipher(key, hmac=True).encrypt(text)
48 assert AESCipher(key, hmac=True).decrypt(enc) == text
79 @pytest.mark.parametrize(
80 "algo", ['fernet', 'aes'],
81 )
82 def test_encryption_with_bad_format_data(algo):
83 key = b'secret'
84 text = b'ihatemysql'
85 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
86 encrypted = b'$xyz' + encrypted[3:]
49 87
50 def test_encryption_with_hmac_with_bad_key(self):
51 key = 'secretstring'
52 text = 'ihatemysql'
53 enc = AESCipher(key, hmac=True).encrypt(text)
54
55 with pytest.raises(SignatureVerificationError) as e:
56 assert AESCipher('differentsecret', hmac=True).decrypt(enc) == ''
57
58 assert 'Encryption signature verification failed' in str(e)
88 with pytest.raises(ValueError) as e:
89 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=True)
59 90
60 def test_encryption_with_hmac_with_bad_data(self):
61 key = 'secret'
62 text = 'ihatemysql'
63 enc = AESCipher(key, hmac=True).encrypt(text)
64 enc = 'xyz' + enc[3:]
65 with pytest.raises(SignatureVerificationError) as e:
66 assert AESCipher(key, hmac=True).decrypt(enc) == text
67
68 assert 'Encryption signature verification failed' in str(e)
69
70 def test_encryption_with_hmac_with_bad_key_not_strict(self):
71 key = 'secretstring'
72 text = 'ihatemysql'
73 enc = AESCipher(key, hmac=True).encrypt(text)
74
75 assert isinstance(AESCipher(
76 'differentsecret', hmac=True, strict_verification=False
77 ).decrypt(enc), InvalidDecryptedValue)
91 assert 'Encrypted Data has invalid format' in str(e)
78 92
79 93
80 class TestEncryptModule2(object):
94 @pytest.mark.parametrize(
95 "algo", ['fernet', 'aes'],
96 )
97 def test_encryption_with_bad_data(algo):
98 key = b'secret'
99 text = b'ihatemysql'
100 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
101 encrypted = encrypted[:-5]
81 102
82 @pytest.mark.parametrize(
83 "key, text",
84 [
85 ('a', 'short'),
86 ('a'*64, 'too long(trimmed to 32)'),
87 ('a'*32, 'just enough'),
88 ('ąćęćę', 'non asci'),
89 ('$asa$asa', 'special $ used'),
90 ]
91 )
92 def test_encryption(self, key, text):
93 enc = Encryptor(key).encrypt(text)
94 assert Encryptor(key).decrypt(enc) == text
103 with pytest.raises(SignatureVerificationError) as e:
104 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=True)
105
106 assert 'SignatureVerificationError' in str(e)
107
95 108
96 def test_encryption_with_bad_key(self):
97 key = 'secretstring'
98 text = 'ihatemysql'
99 enc = Encryptor(key).encrypt(text)
109 def test_encryption_with_hmac():
110 key = b'secret'
111 text = b'ihatemysql'
112 enc = AESCipher(key, hmac=True).encrypt(text)
113 assert AESCipher(key, hmac=True).decrypt(enc) == text
100 114
101 assert Encryptor('differentsecret').decrypt(enc) == ''
102 115
103 def test_encryption_with_bad_key_raises(self):
104 key = 'secretstring'
105 text = 'ihatemysql'
106 enc = Encryptor(key).encrypt(text)
107
108 with pytest.raises(InvalidToken) as e:
109 Encryptor('differentsecret').decrypt(enc, safe=False)
116 def test_encryption_with_hmac_with_bad_data():
117 key = b'secret'
118 text = b'ihatemysql'
119 enc = AESCipher(key, hmac=True).encrypt(text)
120 enc = b'xyz' + enc[3:]
121 with pytest.raises(SignatureVerificationError) as e:
122 assert AESCipher(key, hmac=True).decrypt(enc, safe=False) == text
110 123
111 assert 'InvalidToken' in str(e)
124 assert 'SignatureVerificationError' in str(e)
112 125
113 def test_encryption_with_bad_format_data(self):
114 key = 'secret'
115 text = 'ihatemysql'
116 enc = Encryptor(key).encrypt(text)
117 enc = '$xyz' + enc[3:]
118 126
119 with pytest.raises(ValueError) as e:
120 Encryptor(key).decrypt(enc, safe=False)
121
122 assert 'Encrypted Data has invalid format' in str(e)
127 def test_encryption_with_hmac_with_bad_key_not_strict():
128 key = b'secretstring'
129 text = b'ihatemysql'
130 enc = AESCipher(key, hmac=True).encrypt(text)
123 131
124 def test_encryption_with_bad_data(self):
125 key = 'secret'
126 text = 'ihatemysql'
127 enc = Encryptor(key).encrypt(text)
128 enc = enc[:-5]
132 decrypted = AESCipher(
133 b'differentsecret', hmac=True, strict_verification=False
134 ).decrypt(enc)
129 135
130 with pytest.raises(InvalidToken) as e:
131 Encryptor(key).decrypt(enc, safe=False)
132
133 assert 'InvalidToken' in str(e)
136 assert isinstance(decrypted, InvalidDecryptedValue)
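Condensed into one snippet, the round-trip these tests exercise (key, text and flags taken from the parametrized cases above):

    from rhodecode.lib import enc_utils
    from rhodecode.lib.str_utils import safe_str

    key = b'a' * 32
    token = enc_utils.encrypt_value('just enough', algo='fernet', enc_key=key)
    plain = enc_utils.decrypt_value(token, algo='fernet', enc_key=key, strict_mode=True)
    assert safe_str(plain) == 'just enough'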
@@ -1,463 +1,463 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module to test the performance of pull, push and clone operations.
23 23
24 24 It works by replaying a group of commits to the repo.
25 25 """
26 26
27 27 import argparse
28 28 import collections
29 29 import ConfigParser
30 30 import functools
31 31 import itertools
32 32 import os
33 33 import pprint
34 34 import shutil
35 35 import subprocess
36 36 import sys
37 37 import time
38 38
39 39 import api
40 40
41 41
42 42 def mean(container):
43 43 """Return the mean of the container."""
44 44 if not container:
45 45 return -1.0
46 46 return sum(container) / len(container)
47 47
48 48
49 49 def keep_cwd(f):
50 50 """Decorator that keeps track of the starting working directory."""
51 51 @functools.wraps(f)
52 52 def wrapped_f(*args, **kwargs):
53 53 cur_dir = os.getcwd()
54 54 try:
55 55 return f(*args, **kwargs)
56 56 finally:
57 57 os.chdir(cur_dir)
58 58
59 59 return wrapped_f
60 60
61 61
62 62 def timed(f):
63 63 """Decorator that returns the time it took to execute the function."""
64 64 @functools.wraps(f)
65 65 def wrapped_f(*args, **kwargs):
66 66 start_time = time.time()
67 67 try:
68 68 f(*args, **kwargs)
69 69 finally:
70 70 return time.time() - start_time
71 71
72 72 return wrapped_f
73 73
74 74
75 75 def execute(*popenargs, **kwargs):
76 76 """Extension of subprocess.check_output to support writing to stdin."""
77 77 input = kwargs.pop('stdin', None)
78 78 stdin = None
79 79 if input:
80 80 stdin = subprocess.PIPE
81 81 #if 'stderr' not in kwargs:
82 82 # kwargs['stderr'] = subprocess.PIPE
83 83 if 'stdout' in kwargs:
84 84 raise ValueError('stdout argument not allowed, it will be overridden.')
85 85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
86 86 *popenargs, **kwargs)
87 87 output, error = process.communicate(input=input)
88 88 retcode = process.poll()
89 89 if retcode:
90 90 cmd = kwargs.get("args")
91 91 if cmd is None:
92 92 cmd = popenargs[0]
93 93 print('{} {} {} '.format(cmd, output, error))
94 94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
95 95 return output
96 96
97 97
98 98 def get_repo_name(repo_url):
99 99 """Extract the repo name from its url."""
100 100 repo_url = repo_url.rstrip('/')
101 101 return repo_url.split('/')[-1].split('.')[0]
102 102
103 103
104 104 class TestPerformanceBase(object):
105 105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
106 106 skip_commits):
107 107 self.repo_url = repo_url
108 108 self.repo_name = get_repo_name(self.repo_url)
109 109 self.upstream_repo_name = '%s_upstream' % self.repo_name
110 110 self.base_dir = os.path.abspath(base_dir)
111 111 self.n_commits = n_commits
112 112 self.max_commits = max_commits
113 113 self.skip_commits = skip_commits
114 114 self.push_times = []
115 115 self.pull_times = []
116 116 self.empty_pull_times = []
117 117 self.clone_time = -1.0
118 118 self.last_commit = None
119 119
120 120 self.cloned_repo = ''
121 121 self.pull_repo = ''
122 122 self.orig_repo = ''
123 123
124 124 def run(self):
125 125 try:
126 126 self.test()
127 127 except Exception as error:
128 128 print(error)
129 129 finally:
130 130 self.cleanup()
131 131
132 132 print('Clone time :{}'.format(self.clone_time))
133 133 print('Push time :{}'.format(mean(self.push_times)))
134 134 print('Pull time :{}'.format(mean(self.pull_times)))
135 135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
136 136
137 137 return {
138 138 'clone': self.clone_time,
139 139 'push': mean(self.push_times),
140 140 'pull': mean(self.pull_times),
141 141 'empty_pull': mean(self.empty_pull_times),
142 142 }
143 143
144 144 @keep_cwd
145 145 def test(self):
146 146 os.chdir(self.base_dir)
147 147
148 148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
149 149 if not os.path.exists(self.orig_repo):
150 150 self.clone_repo(self.repo_url, default_only=True)
151 151
152 152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
153 153
154 154 self.add_remote(self.orig_repo, upstream_url)
155 155
156 156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
157 157 self.clone_repo(upstream_url, self.pull_repo)
158 158
159 159 commits = self.get_commits(self.orig_repo)
160 160 self.last_commit = commits[-1]
161 161 if self.skip_commits:
162 162 self.push(
163 163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 164 commits = commits[self.skip_commits:self.max_commits]
165 165
166 166 print('Working with %d commits' % len(commits))
167 167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
168 168 commit = commits[i]
169 169 print('Processing commit %s (%d)' % (commit, i + 1))
170 170 self.push_times.append(
171 171 self.push(self.orig_repo, commit, 'upstream'))
172 172 self.check_remote_last_commit_is(commit, upstream_url)
173 173
174 174 self.pull_times.append(self.pull(self.pull_repo))
175 175 self.check_local_last_commit_is(commit, self.pull_repo)
176 176
177 177 self.empty_pull_times.append(self.pull(self.pull_repo))
178 178
179 179 self.cloned_repo = os.path.join(self.base_dir,
180 180 '%s_clone' % self.repo_name)
181 181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
182 182
183 183 def cleanup(self):
184 184 try:
185 185 self.delete_repo(self.upstream_repo_name)
186 186 except api.ApiError:
187 187 # Continue in case we could not delete the repo. Maybe we did not
188 188 # create it in the first place.
189 189 pass
190 190
191 191 shutil.rmtree(self.pull_repo, ignore_errors=True)
192 192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
193 193
194 194 if os.path.exists(self.orig_repo):
195 195 self.remove_remote(self.orig_repo)
196 196
197 197
198 198 class RhodeCodeMixin(object):
199 199 """Mixin providing the methods to create and delete repos in RhodeCode."""
200 200 def __init__(self, api_key):
201 201 self.api = api.RCApi(api_key=api_key)
202 202
203 203 def create_repo(self, repo_name, repo_type):
204 204 return self.api.create_repo(repo_name, repo_type,
205 205 'Repo for performance testing')
206 206
207 207 def delete_repo(self, repo_name):
208 208 return self.api.delete_repo(repo_name)
209 209
210 210
211 211 class GitMixin(object):
212 212 """Mixin providing the git operations."""
213 213 @timed
214 214 def clone_repo(self, repo_url, destination=None, default_only=False):
215 215 args = ['git', 'clone']
216 216 if default_only:
217 217 args.extend(['--branch', 'master', '--single-branch'])
218 218 args.append(repo_url)
219 219 if destination:
220 220 args.append(destination)
221 221 execute(args)
222 222
223 223 @keep_cwd
224 224 def add_remote(self, repo, remote_url, remote_name='upstream'):
225 225 self.remove_remote(repo, remote_name)
226 226 os.chdir(repo)
227 227 execute(['git', 'remote', 'add', remote_name, remote_url])
228 228
229 229 @keep_cwd
230 230 def remove_remote(self, repo, remote_name='upstream'):
231 231 os.chdir(repo)
232 232 remotes = execute(['git', 'remote']).split('\n')
233 233 if remote_name in remotes:
234 234 execute(['git', 'remote', 'remove', remote_name])
235 235
236 236 @keep_cwd
237 237 def get_commits(self, repo, branch='master'):
238 238 os.chdir(repo)
239 239 commits_list = execute(
240 240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
241 241 return commits_list.strip().split('\n')[::-1]
242 242
243 243 @timed
244 244 def push(self, repo, commit, remote_name=None):
245 245 os.chdir(repo)
246 246 try:
247 247 execute(['git', 'reset', '--soft', commit])
248 248 args = ['git', 'push']
249 249 if remote_name:
250 250 args.append(remote_name)
251 251 execute(args)
252 252 finally:
253 253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
254 254
255 255 @timed
256 256 def pull(self, repo):
257 257 os.chdir(repo)
258 258 execute(['git', 'pull'])
259 259
260 260 def _remote_last_commit(self, repo_url):
261 261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
262 262 return output.split()[0]
263 263
264 264 def check_remote_last_commit_is(self, commit, repo_url):
265 265 last_remote_commit = self._remote_last_commit(repo_url)
266 266 if last_remote_commit != commit:
267 267 raise Exception('Push did not work, expected commit %s but got %s' %
268 268 (commit, last_remote_commit))
269 269
270 270 @keep_cwd
271 271 def _local_last_commit(self, repo):
272 272 os.chdir(repo)
273 273 return execute(['git', 'rev-parse', 'HEAD']).strip()
274 274
275 275 def check_local_last_commit_is(self, commit, repo):
276 276 last_local_commit = self._local_last_commit(repo)
277 277 if last_local_commit != commit:
278 278 raise Exception('Pull did not work, expected commit %s but got %s' %
279 279 (commit, last_local_commit))
280 280
281 281
282 282 class HgMixin(object):
283 283 """Mixin providing the mercurial operations."""
284 284 @timed
285 285 def clone_repo(self, repo_url, destination=None, default_only=False):
286 286 args = ['hg', 'clone']
287 287 if default_only:
288 288 args.extend(['--branch', 'default'])
289 289 args.append(repo_url)
290 290 if destination:
291 291 args.append(destination)
292 292 execute(args)
293 293
294 294 @keep_cwd
295 295 def add_remote(self, repo, remote_url, remote_name='upstream'):
296 296 self.remove_remote(repo, remote_name)
297 297 os.chdir(repo)
298 298 hgrc = ConfigParser.RawConfigParser()
299 299 hgrc.read('.hg/hgrc')
300 300 hgrc.set('paths', remote_name, remote_url)
301 301 with open('.hg/hgrc', 'w') as f:
302 302 hgrc.write(f)
303 303
304 304 @keep_cwd
305 305 def remove_remote(self, repo, remote_name='upstream'):
306 306 os.chdir(repo)
307 307 hgrc = ConfigParser.RawConfigParser()
308 308 hgrc.read('.hg/hgrc')
309 309 hgrc.remove_option('paths', remote_name)
310 310 with open('.hg/hgrc', 'w') as f:
311 311 hgrc.write(f)
312 312
313 313 @keep_cwd
314 314 def get_commits(self, repo, branch='default'):
315 315 os.chdir(repo)
316 316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
317 317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
318 318 '{node}\n', '--follow-first'])
319 319 return commits_list.strip().split('\n')[::-1]
320 320
321 321 @timed
322 322 def push(self, repo, commit, remote_name=None):
323 323 os.chdir(repo)
324 324 args = ['hg', 'push', '--rev', commit, '--new-branch']
325 325 if remote_name:
326 326 args.append(remote_name)
327 327 execute(args)
328 328
329 329 @timed
330 330 def pull(self, repo):
331 331 os.chdir(repo)
332 332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
333 333
334 334 def _remote_last_commit(self, repo_url):
335 335 return execute(['hg', 'identify', repo_url])[:12]
336 336
337 337 def check_remote_last_commit_is(self, commit, repo_url):
338 338 last_remote_commit = self._remote_last_commit(repo_url)
339 339 if not commit.startswith(last_remote_commit):
340 340 raise Exception('Push did not work, expected commit %s but got %s' %
341 341 (commit, last_remote_commit))
342 342
343 343 @keep_cwd
344 344 def _local_last_commit(self, repo):
345 345 os.chdir(repo)
346 346 return execute(['hg', 'identify'])[:12]
347 347
348 348 def check_local_last_commit_is(self, commit, repo):
349 349 last_local_commit = self._local_last_commit(repo)
350 350 if not commit.startswith(last_local_commit):
351 351 raise Exception('Pull did not work, expected commit %s but got %s' %
352 352 (commit, last_local_commit))
353 353
354 354
355 355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
356 356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
357 357 api_key):
358 358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
359 359 max_commits, skip_commits)
360 360 RhodeCodeMixin.__init__(self, api_key)
361 361 self.repo_type = 'git'
362 362
363 363
364 364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
365 365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
366 366 api_key):
367 367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
368 368 max_commits, skip_commits)
369 369 RhodeCodeMixin.__init__(self, api_key)
370 370 self.repo_type = 'hg'
371 371
372 372
373 373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
374 374 api_key):
375 375 max_commits = min(10 * step,
376 376 int((max_commits - skip_commits) / step) * step)
377 377 max_commits += skip_commits
378 378 if repo_type == 'git':
379 379 return GitTestPerformance(
380 380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
381 381 elif repo_type == 'hg':
382 382 return HgTestPerformance(
383 383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
384 384
385 385
386 386 def main(argv):
387 387 parser = argparse.ArgumentParser(
388 388 description='Performance tests for push/pull/clone for git and ' +
389 389 'mercurial repos.')
390 390 parser.add_argument(
391 391 '--tests', dest='tests', action='store', required=False, default='all',
392 392 help='The tests to run. Default: all. But could be any comma ' +
393 393 'separated list with python, hg, kernel or git')
394 394 parser.add_argument(
395 395 '--sizes', dest='sizes', action='store', required=False,
396 396 default='1,10,100,1000,2500',
397 397 help='The sizes to use. Default: 1,10,100,1000,2500')
398 398 parser.add_argument(
399 399 '--dir', dest='dir', action='store', required=True,
400 400 help='The dir where to store the repos')
401 401 parser.add_argument(
402 402 '--api-key', dest='api_key', action='store', required=True,
403 403 help='The api key of RhodeCode')
404 404 options = parser.parse_args(argv[1:])
405 405 print(options)
406 406
407 407 test_config = {
408 408 'python': {
409 409 'url': 'https://hg.python.org/cpython/',
410 410 'limit': 23322,
411 411 'type': 'hg',
412 412 # Do not time the first commit, as it is HUGE!
413 413 'skip': 1,
414 414 },
415 415 'hg': {
416 416 'url': 'http://selenic.com/hg',
417 417 'limit': 14396,
418 418 'type': 'hg',
419 419 },
420 420 'kernel': {
421 421 'url': 'https://github.com/torvalds/linux.git',
422 422 'limit': 46271,
423 423 'type': 'git',
424 424 },
425 425 'git': {
426 426 'url': 'https://github.com/git/git.git',
427 427 'limit': 13525,
428 428 'type': 'git',
429 429 }
430 430
431 431 }
432 432
433 433 test_names = options.tests.split(',')
434 434 if test_names == ['all']:
435 435 test_names = test_config.keys()
436 436 if not set(test_names) <= set(test_config.keys()):
437 437 print('Invalid tests: only %s are valid but specified %s' %
438 438 (test_config.keys(), test_names))
439 439 return 1
440 440
441 441 sizes = options.sizes.split(',')
442 sizes = map(int, sizes)
442 sizes = list(map(int, sizes))
443 443
444 444 base_dir = options.dir
445 445 api_key = options.api_key
446 446 results = collections.defaultdict(dict)
447 447 for test_name, size in itertools.product(test_names, sizes):
448 448 test = get_test(base_dir,
449 449 test_config[test_name]['url'],
450 450 test_config[test_name]['type'],
451 451 size,
452 452 test_config[test_name]['limit'],
453 453 test_config[test_name].get('skip', 0),
454 454 api_key)
455 455 print('*' * 80)
456 456 print('Running performance test: %s with size %d' % (test_name, size))
457 457 print('*' * 80)
458 458 results[test_name][size] = test.run()
459 459 pprint.pprint(dict(results))
460 460
461 461
462 462 if __name__ == '__main__':
463 463 sys.exit(main(sys.argv))
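For reference, a hedged invocation sketch of this script; the directory and API key below are placeholders:

    # limited run: only the git suite, pushing in batches of 1 and 10 commits
    main(['test_performance.py', '--dir', '/tmp/perf-repos',
          '--api-key', 'PLACEHOLDER-KEY', '--tests', 'git', '--sizes', '1,10'])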
@@ -1,472 +1,486 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import threading
22 22 import time
23 23 import logging
24 24 import os.path
25 25 import subprocess
26 26 import tempfile
27 import urllib.request, urllib.error, urllib.parse
27 import urllib.request
28 import urllib.error
29 import urllib.parse
28 30 from lxml.html import fromstring, tostring
29 31 from lxml.cssselect import CSSSelector
30 32 from urllib.parse import unquote_plus
31 33 import webob
32 34
33 35 from webtest.app import TestResponse, TestApp
34 36 from webtest.compat import print_stderr
35 37
36 38 import pytest
37 39
38 40 try:
39 41 import rc_testdata
40 42 except ImportError:
41 43 raise ImportError('Failed to import rc_testdata, '
42 44 'please make sure this package is installed from requirements_test.txt')
43 45
44 46 from rhodecode.model.db import User, Repository
45 47 from rhodecode.model.meta import Session
46 48 from rhodecode.model.scm import ScmModel
47 49 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
48 50 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 51 from rhodecode.tests import login_user_session
50 52
51 53 log = logging.getLogger(__name__)
52 54
53 55
54 56 class CustomTestResponse(TestResponse):
55 57
56 58 def _save_output(self, out):
57 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
59 f = tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='rc-test-', suffix='.html')
58 60 f.write(out)
59 61 return f.name
60 62
61 63 def mustcontain(self, *strings, **kw):
62 64 """
63 65 Assert that the response contains all of the strings passed
64 66 in as arguments.
65 67
66 68 Equivalent to::
67 69
68 70 assert string in res
69 71 """
70 72 print_body = kw.pop('print_body', False)
71 73 if 'no' in kw:
72 74 no = kw['no']
73 75 del kw['no']
74 76 if isinstance(no, str):
75 77 no = [no]
76 78 else:
77 79 no = []
78 80 if kw:
79 81 raise TypeError(
80 82 "The only keyword argument allowed is 'no' got %s" % kw)
81 83
82 84 f = self._save_output(str(self))
83 85
84 86 for s in strings:
85 if not s in self:
87 if s not in self:
86 88 print_stderr("Actual response (no %r):" % s)
87 89 print_stderr("body output saved as `%s`" % f)
88 90 if print_body:
89 91 print_stderr(str(self))
90 92 raise IndexError(
91 93 "Body does not contain string %r, body output saved as %s" % (s, f))
92 94
93 95 for no_s in no:
94 96 if no_s in self:
95 97 print_stderr("Actual response (has %r)" % no_s)
96 98 print_stderr("body output saved as `%s`" % f)
97 99 if print_body:
98 100 print_stderr(str(self))
99 101 raise IndexError(
100 102 "Body contains bad string %r, body output saved as %s" % (no_s, f))
101 103
102 104 def assert_response(self):
103 105 return AssertResponse(self)
104 106
105 107 def get_session_from_response(self):
106 108 """
107 109 This returns the session from a response object.
108 110 """
109 111 from rhodecode.lib.rc_beaker import session_factory_from_settings
110 112 session = session_factory_from_settings(self.test_app._pyramid_settings)
111 113 return session(self.request)
112 114
113 115
114 116 class TestRequest(webob.BaseRequest):
115 117
116 # for py.test
118 # for py.test, so it doesn't try to run this class by its name starting with Test...
117 119 disabled = True
118 120 ResponseClass = CustomTestResponse
119 121
120 122 def add_response_callback(self, callback):
121 123 pass
122 124
125 @classmethod
126 def blank(cls, path, environ=None, base_url=None,
127 headers=None, POST=None, **kw):
128
129 if not path.isascii():
130 # our custom quote path if it contains non-ascii chars
131 path = urllib.parse.quote(path)
132
133 return super(TestRequest, cls).blank(
134 path, environ=environ, base_url=base_url, headers=headers, POST=POST, **kw)
135
123 136
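The overridden blank() percent-encodes non-ascii paths before webob parses them; a quick illustration of the quoting involved (values are illustrative only):

    import urllib.parse

    assert not '/repo/ąćę'.isascii()
    assert urllib.parse.quote('/repo/ąćę') == '/repo/%C4%85%C4%87%C4%99'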
124 137 class CustomTestApp(TestApp):
125 138 """
126 139 Custom app to make mustcontain more Useful, and extract special methods
127 140 """
128 141 RequestClass = TestRequest
129 142 rc_login_data = {}
130 143 rc_current_session = None
131 144
132 145 def login(self, username=None, password=None):
133 146 from rhodecode.lib import auth
134 147
135 148 if username and password:
136 149 session = login_user_session(self, username, password)
137 150 else:
138 151 session = login_user_session(self)
139 152
140 153 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
141 154 self.rc_current_session = session
142 155 return session['rhodecode_user']
143 156
144 157 @property
145 158 def csrf_token(self):
146 159 return self.rc_login_data['csrf_token']
147 160
148 161 @property
149 162 def _pyramid_registry(self):
150 163 return self.app.config.registry
151 164
152 165 @property
153 166 def _pyramid_settings(self):
154 167 return self._pyramid_registry.settings
155 168
156 169
157 170 def set_anonymous_access(enabled):
158 171 """(Dis)allows anonymous access depending on parameter `enabled`"""
159 172 user = User.get_default_user()
160 173 user.active = enabled
161 174 Session().add(user)
162 175 Session().commit()
163 176 time.sleep(1.5) # must sleep for cache (1s to expire)
164 177 log.info('anonymous access is now: %s', enabled)
165 178 assert enabled == User.get_default_user().active, (
166 179 'Cannot set anonymous access')
167 180
168 181
169 182 def check_xfail_backends(node, backend_alias):
170 183 # Using "xfail_backends" here intentionally, since this marks work
171 184 # which is "to be done" soon.
172 185 skip_marker = node.get_closest_marker('xfail_backends')
173 186 if skip_marker and backend_alias in skip_marker.args:
174 187 msg = "Support for backend %s to be developed." % (backend_alias, )
175 188 msg = skip_marker.kwargs.get('reason', msg)
176 189 pytest.xfail(msg)
177 190
178 191
179 192 def check_skip_backends(node, backend_alias):
180 193 # Using "skip_backends" here intentionally, since this marks work which is
181 194 # not supported.
182 195 skip_marker = node.get_closest_marker('skip_backends')
183 196 if skip_marker and backend_alias in skip_marker.args:
184 197 msg = "Feature not supported for backend %s." % (backend_alias, )
185 198 msg = skip_marker.kwargs.get('reason', msg)
186 199 pytest.skip(msg)
187 200
188 201
189 202 def extract_git_repo_from_dump(dump_name, repo_name):
190 203 """Create git repo `repo_name` from dump `dump_name`."""
191 204 repos_path = ScmModel().repos_path
192 205 target_path = os.path.join(repos_path, repo_name)
193 206 rc_testdata.extract_git_dump(dump_name, target_path)
194 207 return target_path
195 208
196 209
197 210 def extract_hg_repo_from_dump(dump_name, repo_name):
198 211 """Create hg repo `repo_name` from dump `dump_name`."""
199 212 repos_path = ScmModel().repos_path
200 213 target_path = os.path.join(repos_path, repo_name)
201 214 rc_testdata.extract_hg_dump(dump_name, target_path)
202 215 return target_path
203 216
204 217
205 218 def extract_svn_repo_from_dump(dump_name, repo_name):
206 219 """Create a svn repo `repo_name` from dump `dump_name`."""
207 220 repos_path = ScmModel().repos_path
208 221 target_path = os.path.join(repos_path, repo_name)
209 222 SubversionRepository(target_path, create=True)
210 223 _load_svn_dump_into_repo(dump_name, target_path)
211 224 return target_path
212 225
213 226
214 227 def assert_message_in_log(log_records, message, levelno, module):
215 228 messages = [
216 229 r.message for r in log_records
217 230 if r.module == module and r.levelno == levelno
218 231 ]
219 232 assert message in messages
220 233
221 234
222 235 def _load_svn_dump_into_repo(dump_name, repo_path):
223 236 """
224 237 Utility to populate a svn repository with a named dump
225 238
226 239 Currently the dumps are in rc_testdata. They might later on be
227 240 integrated with the main repository once they stabilize more.
228 241 """
229 242 dump = rc_testdata.load_svn_dump(dump_name)
230 243 load_dump = subprocess.Popen(
231 244 ['svnadmin', 'load', repo_path],
232 245 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
233 246 stderr=subprocess.PIPE)
234 247 out, err = load_dump.communicate(dump)
235 248 if load_dump.returncode != 0:
236 249 log.error("Output of load_dump command: %s", out)
237 250 log.error("Error output of load_dump command: %s", err)
238 251 raise Exception(
239 252 'Failed to load dump "%s" into repository at path "%s".'
240 253 % (dump_name, repo_path))
241 254
242 255
243 256 class AssertResponse(object):
244 257 """
245 258 Utility that helps to assert things about a given HTML response.
246 259 """
247 260
248 261 def __init__(self, response):
249 262 self.response = response
250 263
251 264 def get_imports(self):
252 265 return fromstring, tostring, CSSSelector
253 266
254 267 def one_element_exists(self, css_selector):
255 268 self.get_element(css_selector)
256 269
257 270 def no_element_exists(self, css_selector):
258 271 assert not self._get_elements(css_selector)
259 272
260 273 def element_equals_to(self, css_selector, expected_content):
261 274 element = self.get_element(css_selector)
262 275 element_text = self._element_to_string(element)
276
263 277 assert expected_content in element_text
264 278
265 279 def element_contains(self, css_selector, expected_content):
266 280 element = self.get_element(css_selector)
267 281 assert expected_content in element.text_content()
268 282
269 283 def element_value_contains(self, css_selector, expected_content):
270 284 element = self.get_element(css_selector)
271 285 assert expected_content in element.value
272 286
273 287 def contains_one_link(self, link_text, href):
274 288 fromstring, tostring, CSSSelector = self.get_imports()
275 289 doc = fromstring(self.response.body)
276 290 sel = CSSSelector('a[href]')
277 291 elements = [
278 292 e for e in sel(doc) if e.text_content().strip() == link_text]
279 293 assert len(elements) == 1, "Did not find link or found multiple links"
280 294 self._ensure_url_equal(elements[0].attrib.get('href'), href)
281 295
282 296 def contains_one_anchor(self, anchor_id):
283 297 fromstring, tostring, CSSSelector = self.get_imports()
284 298 doc = fromstring(self.response.body)
285 299 sel = CSSSelector('#' + anchor_id)
286 300 elements = sel(doc)
287 301 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
288 302
289 303 def _ensure_url_equal(self, found, expected):
290 304 assert _Url(found) == _Url(expected)
291 305
292 306 def get_element(self, css_selector):
293 307 elements = self._get_elements(css_selector)
294 308 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
295 309 return elements[0]
296 310
297 311 def get_elements(self, css_selector):
298 312 return self._get_elements(css_selector)
299 313
300 314 def _get_elements(self, css_selector):
301 315 fromstring, tostring, CSSSelector = self.get_imports()
302 316 doc = fromstring(self.response.body)
303 317 sel = CSSSelector(css_selector)
304 318 elements = sel(doc)
305 319 return elements
306 320
307 321 def _element_to_string(self, element):
308 322 fromstring, tostring, CSSSelector = self.get_imports()
309 return tostring(element)
323 return tostring(element, encoding='unicode')
310 324
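# Illustrative sketch: typical assertions against an HTML response using the
# class above; the CSS selectors and expected text are example values only.
def _example_assert_response_usage(response):
    assert_response = AssertResponse(response)
    assert_response.one_element_exists('#repo_name')       # assumed selector
    assert_response.element_contains('.title', 'Settings') # assumed content
    assert_response.no_element_exists('.alert-error')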
311 325
312 326 class _Url(object):
313 327 """
314 328 A url object that can be compared with other url objects
315 329 without regard to the vagaries of encoding, escaping, and ordering
316 330 of parameters in query strings.
317 331
318 332 Inspired by
319 333 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
320 334 """
321 335
322 336 def __init__(self, url):
323 337 parts = urllib.parse.urlparse(url)
324 338 _query = frozenset(urllib.parse.parse_qsl(parts.query))
325 339 _path = unquote_plus(parts.path)
326 340 parts = parts._replace(query=_query, path=_path)
327 341 self.parts = parts
328 342
329 343 def __eq__(self, other):
330 344 return self.parts == other.parts
331 345
332 346 def __hash__(self):
333 347 return hash(self.parts)
334 348
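# Illustrative sketch: _Url ignores query parameter order and path escaping
# when comparing, so these two spellings of the same URL compare equal.
def _example_url_comparison():
    url_a = _Url('http://example.com/a%20b?x=1&y=2')
    url_b = _Url('http://example.com/a b?y=2&x=1')
    assert url_a == url_b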
335 349
336 350 def run_test_concurrently(times, raise_catched_exc=True):
337 351 """
338 352 Add this decorator to small pieces of code that you want to test
339 353 concurrently
340 354
341 355 ex:
342 356
343 357 @run_test_concurrently(25)
344 358 def my_test_function():
345 359 ...
346 360 """
347 361 def test_concurrently_decorator(test_func):
348 362 def wrapper(*args, **kwargs):
349 363 exceptions = []
350 364
351 365 def call_test_func():
352 366 try:
353 367 test_func(*args, **kwargs)
354 368 except Exception as e:
355 369 exceptions.append(e)
356 370 if raise_catched_exc:
357 371 raise
358 372 threads = []
359 373 for i in range(times):
360 374 threads.append(threading.Thread(target=call_test_func))
361 375 for t in threads:
362 376 t.start()
363 377 for t in threads:
364 378 t.join()
365 379 if exceptions:
366 380 raise Exception(
367 381 'test_concurrently intercepted %s exceptions: %s' % (
368 382 len(exceptions), exceptions))
369 383 return wrapper
370 384 return test_concurrently_decorator
371 385
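# Illustrative sketch: decorating a small placeholder function so that calling
# it runs the body in 5 threads at once and re-raises any collected exceptions.
@run_test_concurrently(5)
def _example_concurrent_body():
    # placeholder for the small piece of code under concurrent test
    pass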
372 386
373 387 def wait_for_url(url, timeout=10):
374 388 """
375 389 Wait until URL becomes reachable.
376 390
377 391 It polls the URL until the timeout is reached or it became reachable.
378 392 It will call `pytest.fail` in case the URL is not reachable.
379 393 """
380 394 timeout = time.time() + timeout
381 395 last = 0
382 396 wait = 0.1
383 397
384 398 while timeout > last:
385 399 last = time.time()
386 400 if is_url_reachable(url):
387 401 break
388 402 elif (last + wait) > time.time():
389 403 # Go to sleep because not enough time has passed since last check.
390 404 time.sleep(wait)
391 405 else:
392 406 pytest.fail("Timeout while waiting for URL {}".format(url))
393 407
394 408
395 409 def is_url_reachable(url):
396 410 try:
397 411 urllib.request.urlopen(url)
398 412 except urllib.error.URLError:
399 413 log.exception('URL `{}` is not reachable'.format(url))
400 414 return False
401 415 return True
402 416
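# Illustrative sketch: block until a locally started test server answers; the
# URL and timeout below are example values, not endpoints from this changeset.
def _example_wait_for_test_server():
    wait_for_url('http://127.0.0.1:10020/', timeout=30)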
403 417
404 418 def repo_on_filesystem(repo_name):
405 419 from rhodecode.lib import vcs
406 420 from rhodecode.tests import TESTS_TMP_PATH
407 421 repo = vcs.get_vcs_instance(
408 422 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
409 423 return repo is not None
410 424
411 425
412 426 def commit_change(
413 427 repo, filename, content, message, vcs_type, parent=None, newfile=False):
414 428 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
415 429
416 430 repo = Repository.get_by_repo_name(repo)
417 431 _commit = parent
418 432 if not parent:
419 433 _commit = EmptyCommit(alias=vcs_type)
420 434
421 435 if newfile:
422 436 nodes = {
423 437 filename: {
424 438 'content': content
425 439 }
426 440 }
427 441 commit = ScmModel().create_nodes(
428 442 user=TEST_USER_ADMIN_LOGIN, repo=repo,
429 443 message=message,
430 444 nodes=nodes,
431 445 parent_commit=_commit,
432 446 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
433 447 )
434 448 else:
435 449 commit = ScmModel().commit_change(
436 450 repo=repo.scm_instance(), repo_name=repo.repo_name,
437 451 commit=parent, user=TEST_USER_ADMIN_LOGIN,
438 452 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
439 453 message=message,
440 454 content=content,
441 455 f_path=filename
442 456 )
443 457 return commit
444 458
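# Illustrative sketch: create a new file in a test repository via the helper
# above; the repository name, file name and contents are example values.
def _example_commit_new_file():
    return commit_change(
        repo='vcs_test_git',           # assumed test repo name
        filename='docs/example.txt',
        content='example content\n',
        message='add example file',
        vcs_type='git',
        newfile=True)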
445 459
446 460 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
447 461 if not default:
448 462 raise ValueError('Permission for default user must be given')
449 463 form_data = [(
450 464 'csrf_token', csrf_token
451 465 )]
452 466 # add default
453 467 form_data.extend([
454 468 ('u_perm_1', default)
455 469 ])
456 470
457 471 if grant:
458 472 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
459 473 form_data.extend([
460 474 ('perm_new_member_perm_new{}'.format(cnt), perm),
461 475 ('perm_new_member_id_new{}'.format(cnt), obj_id),
462 476 ('perm_new_member_name_new{}'.format(cnt), obj_name),
463 477 ('perm_new_member_type_new{}'.format(cnt), obj_type),
464 478
465 479 ])
466 480 if revoke:
467 481 for obj_id, obj_type in revoke:
468 482 form_data.extend([
469 483 ('perm_del_member_id_{}'.format(obj_id), obj_id),
470 484 ('perm_del_member_type_{}'.format(obj_id), obj_type),
471 485 ])
472 486 return form_data
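# Illustrative sketch: build form data that grants write access to one member
# and revokes another; the ids, names and permission strings are example values.
def _example_permission_form_data(csrf_token):
    return permission_update_data_generator(
        csrf_token,
        default='repository.read',
        grant=[(2, 'repository.write', 'some-user', 'user')],
        revoke=[(3, 'user')])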
@@ -1,193 +1,199 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base for test suite for making push/pull operations.
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30 from os.path import join as jn
31 31 from subprocess import Popen, PIPE
32 32 import logging
33 33 import os
34 34 import tempfile
35 35
36 from rhodecode.lib.str_utils import safe_str
36 37 from rhodecode.tests import GIT_REPO, HG_REPO
37 38
38 39 DEBUG = True
39 40 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 41 REPO_GROUP = 'a_repo_group'
41 42 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 43 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43 44
44 45 log = logging.getLogger(__name__)
45 46
46 47
47 48 class Command(object):
48 49
49 50 def __init__(self, cwd):
50 51 self.cwd = cwd
51 52 self.process = None
52 53
53 54 def execute(self, cmd, *args):
54 55 """
55 56 Runs command on the system with given ``args``.
56 57 """
57 58
58 59 command = cmd + ' ' + ' '.join(args)
59 60 if DEBUG:
60 61 log.debug('*** CMD %s ***', command)
61 62
62 63 env = dict(os.environ)
63 64 # Delete coverage variables, as they make the test fail for Mercurial
64 65 for key in env.keys():
65 66 if key.startswith('COV_CORE_'):
66 67 del env[key]
67 68
68 69 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 70 cwd=self.cwd, env=env)
70 71 stdout, stderr = self.process.communicate()
72
73 stdout = safe_str(stdout)
74 stderr = safe_str(stderr)
75
71 76 if DEBUG:
72 77 log.debug('STDOUT:%s', stdout)
73 78 log.debug('STDERR:%s', stderr)
74 79 return stdout, stderr
75 80
76 81 def assert_returncode_success(self):
77 82 assert self.process.returncode == 0
78 83
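# Illustrative sketch: run a shell command inside a temporary directory and
# check that it exited cleanly; `tmpdir` is assumed to be the pytest fixture
# and the command itself is an example value.
def _example_command_usage(tmpdir):
    cmd = Command(str(tmpdir))
    stdout, stderr = cmd.execute('git', '--version')
    cmd.assert_returncode_success()
    return stdout, stderr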
79 84
80 85 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
81 git_ident = "git config user.name {} && git config user.email {}".format(
82 'Marcin Kuźminski', 'me@email.com')
86 full_name = 'Marcin Kuźminski'
87 email = 'me@email.com'
88 git_ident = f"git config user.name {full_name} && git config user.email {email}"
83 89 cwd = path = jn(dest)
84 90
85 91 tags = tags or []
86 92 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
87 93 Command(cwd).execute('touch %s' % added_file)
88 94 Command(cwd).execute('%s add %s' % (vcs, added_file))
89 95 author_str = 'Marcin Kuźminski <me@email.com>'
90 96
91 97 for i in range(kwargs.get('files_no', 3)):
92 98 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
93 99 Command(cwd).execute(cmd)
94 100
95 101 if vcs == 'hg':
96 102 cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
97 103 i, author_str, added_file
98 104 )
99 105 elif vcs == 'git':
100 106 cmd = """%s && git commit -m 'committed new %s' %s""" % (
101 107 git_ident, i, added_file)
102 108 Command(cwd).execute(cmd)
103 109
104 110 for tag in tags:
105 111 if vcs == 'hg':
106 112 Command(cwd).execute(
107 113 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
108 114 elif vcs == 'git':
109 115 if tag['commit']:
110 116 # annotated tag
111 117 _stdout, _stderr = Command(cwd).execute(
112 118 """%s && git tag -a %s -m "%s" """ % (
113 119 git_ident, tag['name'], tag['commit']))
114 120 else:
115 121 # lightweight tag
116 122 _stdout, _stderr = Command(cwd).execute(
117 123 """%s && git tag %s""" % (
118 124 git_ident, tag['name']))
119 125
120 126
121 127 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
122 128 new_branch=False, **kwargs):
123 129 """
124 130 Generate some files, add them to the DEST repo and push them back.
125 131 `vcs` is 'git' or 'hg' and defines which VCS the files are created for.
126 132 """
127 133 git_ident = "git config user.name {} && git config user.email {}".format(
128 134 'Marcin KuΕΊminski', 'me@email.com')
129 135 cwd = path = jn(dest)
130 136
131 137 # commit some stuff into this repo
132 138 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
133 139
134 140 default_target_branch = {
135 141 'git': 'master',
136 142 'hg': 'default'
137 143 }.get(vcs)
138 144
139 145 target_branch = target_branch or default_target_branch
140 146
141 147 # PUSH it back
142 148 stdout = stderr = None
143 149 if vcs == 'hg':
144 150 maybe_new_branch = ''
145 151 if new_branch:
146 152 maybe_new_branch = '--new-branch'
147 153 stdout, stderr = Command(cwd).execute(
148 154 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
149 155 )
150 156 elif vcs == 'git':
151 157 stdout, stderr = Command(cwd).execute(
152 158 """{} &&
153 159 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
154 160 )
155 161
156 162 return stdout, stderr
157 163
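# Illustrative sketch: push generated commits from a local clone to a test
# repository and verify the push output; `local_clone_path` and `clone_url`
# are placeholders for values provided by the surrounding test.
def _example_push_and_check(local_clone_path, clone_url):
    stdout, stderr = _add_files_and_push(
        'git', local_clone_path, clone_url=clone_url, files_no=2)
    _check_proper_git_push(stdout, stderr, branch='master')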
158 164
159 165 def _check_proper_git_push(
160 166 stdout, stderr, branch='master', should_set_default_branch=False):
161 167 # Note: Git is writing most information to stderr intentionally
162 168 assert 'fatal' not in stderr
163 169 assert 'rejected' not in stderr
164 170 assert 'Pushing to' in stderr
165 171 assert '%s -> %s' % (branch, branch) in stderr
166 172
167 173 if should_set_default_branch:
168 174 assert "Setting default branch to %s" % branch in stderr
169 175 else:
170 176 assert "Setting default branch" not in stderr
171 177
172 178
173 179 def _check_proper_hg_push(stdout, stderr, branch='default'):
174 180 assert 'pushing to' in stdout
175 181 assert 'searching for changes' in stdout
176 182
177 183 assert 'abort:' not in stderr
178 184
179 185
180 186 def _check_proper_clone(stdout, stderr, vcs):
181 187 if vcs == 'hg':
182 188 assert 'requesting all changes' in stdout
183 189 assert 'adding changesets' in stdout
184 190 assert 'adding manifests' in stdout
185 191 assert 'adding file changes' in stdout
186 192
187 193 assert stderr == ''
188 194
189 195 if vcs == 'git':
190 196 assert '' == stdout
191 197 assert 'Cloning into' in stderr
192 198 assert 'abort:' not in stderr
193 199 assert 'fatal:' not in stderr