tests: multiple test case fixes for python3
Author: super-admin
Revision: r4994:4e9283a1 (default branch)
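
The changes below follow two recurring Python 3 patterns: assertions built with %-style formatting are rewritten as f-strings, and response payloads are checked through response.text (a str) rather than response.body (bytes under Python 3). A minimal sketch of the f-string rewrite; 'vcs_test_hg' is only an illustrative value, not taken from the test fixtures:

# Hedged sketch of the formatting rewrite applied throughout these tests.
repo_name = 'vcs_test_hg'  # example value only

old_assertion_needle = '"name_raw": "%s"' % repo_name   # pre-change form
new_assertion_needle = f'"name_raw":"{repo_name}"'      # form used after the fix

print(old_assertion_needle)  # "name_raw": "vcs_test_hg"
print(new_assertion_needle)  # "name_raw":"vcs_test_hg"

Note that the two needles also differ by the space after the colon; the reason for that is illustrated after the first file below.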
@@ -1,76 +1,75 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.apps._base import ADMIN_PREFIX
from rhodecode.model.db import User, UserEmailMap, Repository, UserFollowing
from rhodecode.tests import (
    TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL,
    assert_session_flash)
from rhodecode.tests.fixture import Fixture

fixture = Fixture()


def route_path(name, **kwargs):
    return {
        'my_account_repos':
            ADMIN_PREFIX + '/my_account/repos',
        'my_account_watched':
            ADMIN_PREFIX + '/my_account/watched',
        'my_account_perms':
            ADMIN_PREFIX + '/my_account/perms',
        'my_account_notifications':
            ADMIN_PREFIX + '/my_account/notifications',
    }[name].format(**kwargs)


class TestMyAccountSimpleViews(TestController):

    def test_my_account_my_repos(self, autologin_user):
        response = self.app.get(route_path('my_account_repos'))
        repos = Repository.query().filter(
            Repository.user == User.get_by_username(
                TEST_USER_ADMIN_LOGIN)).all()
        for repo in repos:
-            response.mustcontain('"name_raw": "%s"' % repo.repo_name)
+            response.mustcontain(f'"name_raw":"{repo.repo_name}"')

    def test_my_account_my_watched(self, autologin_user):
        response = self.app.get(route_path('my_account_watched'))

        repos = UserFollowing.query().filter(
            UserFollowing.user == User.get_by_username(
                TEST_USER_ADMIN_LOGIN)).all()
        for repo in repos:
-            response.mustcontain(
-                '"name_raw": "%s"' % repo.follows_repository.repo_name)
+            response.mustcontain(f'"name_raw":"{repo.follows_repository.repo_name}"')

    def test_my_account_perms(self, autologin_user):
        response = self.app.get(route_path('my_account_perms'))
        assert_response = response.assert_response()
        assert assert_response.get_elements('.perm_tag.none')
        assert assert_response.get_elements('.perm_tag.read')
        assert assert_response.get_elements('.perm_tag.write')
        assert assert_response.get_elements('.perm_tag.admin')

    def test_my_account_notifications(self, autologin_user):
        response = self.app.get(route_path('my_account_notifications'))
        response.mustcontain('Test flash message')
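
The updated assertions in the file above expect "name_raw":"..." with no space after the colon. That matches JSON rendered with compact separators, which is presumably how the my-account repo list is serialized after the Python 3 port; the serializer itself is not part of this diff, so the snippet below only illustrates the separator difference, not the actual view code:

import json

payload = {'name_raw': 'vcs_test_hg'}  # illustrative value only

# Default separators keep a space after ':' -- the old expected substring.
print(json.dumps(payload))                         # {"name_raw": "vcs_test_hg"}

# Compact separators drop it -- matching the new f-string assertion.
print(json.dumps(payload, separators=(',', ':')))  # {"name_raw":"vcs_test_hg"}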
@@ -1,220 +1,220 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import re

import pytest

from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
from rhodecode.tests import TestController

MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')


def route_path(name, params=None, **kwargs):
    import urllib.request, urllib.parse, urllib.error

    base_url = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_commits': '/{repo_name}/commits',
        'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
        'repo_commits_elements': '/{repo_name}/commits_elements',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url


def assert_commits_on_page(response, indexes):
    found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)]
    assert found_indexes == indexes


class TestChangelogController(TestController):

    def test_commits_page(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def test_changelog(self, backend):
        self.log_user()
        response = self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name))

        first_idx = -1
        last_idx = -DEFAULT_CHANGELOG_SIZE
        self.assert_commit_range_on_page(
            response, first_idx, last_idx, backend)

    @pytest.mark.backends("hg", "git")
    def test_changelog_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("hg", "git")
    def test_commits_filtered_by_branch(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=backend.default_branch_name)),
            status=200)

    @pytest.mark.backends("svn")
    def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
        repo = backend['svn-simple-layout']
        response = self.app.get(
            route_path('repo_changelog', repo_name=repo.repo_name,
                       params=dict(branch='trunk')),
            status=200)

        assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])

    def test_commits_filtered_by_wrong_branch(self, backend):
        self.log_user()
        branch = 'wrong-branch-name'
        response = self.app.get(
            route_path('repo_commits', repo_name=backend.repo_name,
                       params=dict(branch=branch)),
            status=302)
        expected_url = '/{repo}/commits/{branch}'.format(
            repo=backend.repo_name, branch=branch)
        assert expected_url in response.location
        response = response.follow()
        expected_warning = 'Branch {} is not found.'.format(branch)
-        assert expected_warning in response.body
+        assert expected_warning in response.text

    @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
    def test_changelog_filtered_by_branch_with_merges(
            self, autologin_user, backend):

        # Note: The changelog of branch "b" does not contain the commit "a1"
        # although this is a parent of commit "b1". And branch "b" has commits
        # which have a smaller index than commit "a1".
        commits = [
            {'message': 'a'},
            {'message': 'b', 'branch': 'b'},
            {'message': 'a1', 'parents': ['a']},
            {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
        ]
        backend.create_repo(commits)

        self.app.get(
            route_path('repo_changelog', repo_name=backend.repo_name,
                       params=dict(branch='b')),
            status=200)

    @pytest.mark.backends("hg")
    def test_commits_closed_branches(self, autologin_user, backend):
        repo = backend['closed_branch']
        response = self.app.get(
            route_path('repo_commits', repo_name=repo.repo_name,
                       params=dict(branch='experimental')),
            status=200)

        assert_commits_on_page(response, indexes=[3, 1])

    def test_changelog_pagination(self, backend):
        self.log_user()
        # pagination, walk up to page 6
        changelog_url = route_path(
            'repo_commits', repo_name=backend.repo_name)

        for page in range(1, 7):
            response = self.app.get(changelog_url, {'page': page})

            first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
            last_idx = -DEFAULT_CHANGELOG_SIZE * page
            self.assert_commit_range_on_page(response, first_idx, last_idx, backend)

    def assert_commit_range_on_page(
            self, response, first_idx, last_idx, backend):
        input_template = (
            """<input class="commit-range" """
            """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" """
            """data-short-id="%(short_id)s" id="%(raw_id)s" """
            """name="%(raw_id)s" type="checkbox" value="1" />"""
        )

        commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
        repo = backend.repo

        first_commit_on_page = repo.get_commit(commit_idx=first_idx)
        response.mustcontain(
            input_template % {'raw_id': first_commit_on_page.raw_id,
                              'idx': first_commit_on_page.idx,
                              'short_id': first_commit_on_page.short_id})

        response.mustcontain(commit_span_template % (
            first_commit_on_page.idx, first_commit_on_page.short_id)
        )

        last_commit_on_page = repo.get_commit(commit_idx=last_idx)
        response.mustcontain(
            input_template % {'raw_id': last_commit_on_page.raw_id,
                              'idx': last_commit_on_page.idx,
                              'short_id': last_commit_on_page.short_id})
        response.mustcontain(commit_span_template % (
            last_commit_on_page.idx, last_commit_on_page.short_id)
        )

        first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
        first_span_of_next_page = commit_span_template % (
            first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
        assert first_span_of_next_page not in response

    @pytest.mark.parametrize('test_path', [
        'vcs/exceptions.py',
        '/vcs/exceptions.py',
        '//vcs/exceptions.py'
    ])
    def test_commits_with_filenode(self, backend, test_path):
        self.log_user()
        response = self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path=test_path),
        )

        # history commits messages
        response.mustcontain('Added exceptions module, this time for real')
        response.mustcontain('Added not implemented hg backend test case')
        response.mustcontain('Added BaseChangeset class')

    def test_commits_with_filenode_that_is_dirnode(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='/tests'),
            status=302)

    def test_commits_with_filenode_not_existing(self, backend):
        self.log_user()
        self.app.get(
            route_path('repo_commits_file', repo_name=backend.repo_name,
                       commit_id='tip', f_path='wrong_path'),
            status=302)
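
The only change in the file above swaps response.body for response.text in the flash-message assertion. On Python 3, checking a str substring against a bytes haystack raises TypeError instead of returning False, so the old form cannot even run. A small self-contained illustration with plain literals (no WebTest response object involved):

expected_warning = 'Branch wrong-branch-name is not found.'
body_bytes = b'<div class="alert">Branch wrong-branch-name is not found.</div>'

try:
    expected_warning in body_bytes        # str needle, bytes haystack: TypeError on Python 3
except TypeError as exc:
    print('mixed str/bytes check rejected:', exc)

body_text = body_bytes.decode('utf-8')    # the decoded str, which response.text provides
assert expected_warning in body_text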
@@ -1,327 +1,327 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
from rhodecode.lib.helpers import _shorten_commit_id


def route_path(name, params=None, **kwargs):
    import urllib.request, urllib.parse, urllib.error

    base_url = {
        'repo_commit': '/{repo_name}/changeset/{commit_id}',
        'repo_commit_children': '/{repo_name}/changeset_children/{commit_id}',
        'repo_commit_parents': '/{repo_name}/changeset_parents/{commit_id}',
        'repo_commit_raw': '/{repo_name}/changeset-diff/{commit_id}',
        'repo_commit_patch': '/{repo_name}/changeset-patch/{commit_id}',
        'repo_commit_download': '/{repo_name}/changeset-download/{commit_id}',
        'repo_commit_data': '/{repo_name}/changeset-data/{commit_id}',
        'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url


@pytest.mark.usefixtures("app")
class TestRepoCommitView(object):

    def test_show_commit(self, backend):
        commit_id = self.commit_id[backend.alias]
        response = self.app.get(route_path(
            'repo_commit', repo_name=backend.repo_name, commit_id=commit_id))
        response.mustcontain('Added a symlink')
        response.mustcontain(commit_id)
        response.mustcontain('No newline at end of file')

    def test_show_raw(self, backend):
        commit_id = self.commit_id[backend.alias]
        response = self.app.get(route_path(
            'repo_commit_raw',
            repo_name=backend.repo_name, commit_id=commit_id))
-        assert response.body == self.diffs[backend.alias]
+        assert response.text == self.diffs[backend.alias]

    def test_show_raw_patch(self, backend):
        response = self.app.get(route_path(
            'repo_commit_patch', repo_name=backend.repo_name,
            commit_id=self.commit_id[backend.alias]))
-        assert response.body == self.patches[backend.alias]
+        assert response.text == self.patches[backend.alias]

    def test_commit_download(self, backend):
        response = self.app.get(route_path(
            'repo_commit_download',
            repo_name=backend.repo_name,
            commit_id=self.commit_id[backend.alias]))
-        assert response.body == self.diffs[backend.alias]
+        assert response.text == self.diffs[backend.alias]

    def test_single_commit_page_different_ops(self, backend):
        commit_id = {
            'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
            'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
            'svn': '337',
        }
        diff_stat = {
            'hg': (21, 943, 288),
            'git': (20, 941, 286),
            'svn': (21, 943, 288),
        }

        commit_id = commit_id[backend.alias]
        response = self.app.get(route_path(
            'repo_commit',
            repo_name=backend.repo_name, commit_id=commit_id))

        response.mustcontain(_shorten_commit_id(commit_id))

        compare_page = ComparePage(response)
        file_changes = diff_stat[backend.alias]
        compare_page.contains_change_summary(*file_changes)

        # files op files
        response.mustcontain('File not present at commit: %s' %
                             _shorten_commit_id(commit_id))

        # svn uses a different filename
        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
        else:
            response.mustcontain('new file 100644')
        response.mustcontain('Changed theme to ADC theme')  # commit msg

        self._check_new_diff_menus(response, right_menu=True)

    def test_commit_range_page_different_ops(self, backend):
        commit_id_range = {
            'hg': (
                '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
                '603d6c72c46d953420c89d36372f08d9f305f5dd'),
            'git': (
                '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
                '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
            'svn': (
                '335',
                '337'),
        }
        commit_ids = commit_id_range[backend.alias]
        commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
        response = self.app.get(route_path(
            'repo_commit',
            repo_name=backend.repo_name, commit_id=commit_id))

        response.mustcontain(_shorten_commit_id(commit_ids[0]))
        response.mustcontain(_shorten_commit_id(commit_ids[1]))

        compare_page = ComparePage(response)

        # svn is special
        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            for file_changes in [(1, 5, 1), (12, 236, 22), (21, 943, 288)]:
                compare_page.contains_change_summary(*file_changes)
        elif backend.alias == 'git':
            response.mustcontain('new file 100644')
            for file_changes in [(12, 222, 20), (20, 941, 286)]:
                compare_page.contains_change_summary(*file_changes)
        else:
            response.mustcontain('new file 100644')
            for file_changes in [(12, 222, 20), (21, 943, 288)]:
                compare_page.contains_change_summary(*file_changes)

        # files op files
        response.mustcontain('File not present at commit: %s' % _shorten_commit_id(commit_ids[1]))
        response.mustcontain('Added docstrings to vcs.cli')  # commit msg
        response.mustcontain('Changed theme to ADC theme')  # commit msg

        self._check_new_diff_menus(response)

    def test_combined_compare_commit_page_different_ops(self, backend):
        commit_id_range = {
            'hg': (
                '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
                '603d6c72c46d953420c89d36372f08d9f305f5dd'),
            'git': (
                'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
                '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
            'svn': (
                '335',
                '337'),
        }
        commit_ids = commit_id_range[backend.alias]
        response = self.app.get(route_path(
            'repo_compare',
            repo_name=backend.repo_name,
            source_ref_type='rev', source_ref=commit_ids[0],
            target_ref_type='rev', target_ref=commit_ids[1], ))

        response.mustcontain(_shorten_commit_id(commit_ids[0]))
        response.mustcontain(_shorten_commit_id(commit_ids[1]))

        # files op files
        response.mustcontain('File not present at commit: %s' %
                             _shorten_commit_id(commit_ids[1]))

        compare_page = ComparePage(response)

        # svn is special
        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            file_changes = (32, 1179, 310)
            compare_page.contains_change_summary(*file_changes)
        elif backend.alias == 'git':
            response.mustcontain('new file 100644')
            file_changes = (31, 1163, 306)
            compare_page.contains_change_summary(*file_changes)
        else:
            response.mustcontain('new file 100644')
            file_changes = (32, 1165, 308)
            compare_page.contains_change_summary(*file_changes)

        response.mustcontain('Added docstrings to vcs.cli')  # commit msg
        response.mustcontain('Changed theme to ADC theme')  # commit msg

        self._check_new_diff_menus(response)

    def test_changeset_range(self, backend):
        self._check_changeset_range(
            backend, self.commit_id_range, self.commit_id_range_result)

    def test_changeset_range_with_initial_commit(self, backend):
        commit_id_range = {
            'hg': (
                'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
                '...6cba7170863a2411822803fa77a0a264f1310b35'),
            'git': (
                'c1214f7e79e02fc37156ff215cd71275450cffc3'
                '...fa6600f6848800641328adbf7811fd2372c02ab2'),
            'svn': '1...3',
        }
        commit_id_range_result = {
            'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
            'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
            'svn': ['1', '2', '3'],
        }
        self._check_changeset_range(
            backend, commit_id_range, commit_id_range_result)

    def _check_changeset_range(
            self, backend, commit_id_ranges, commit_id_range_result):
        response = self.app.get(
            route_path('repo_commit',
                       repo_name=backend.repo_name,
                       commit_id=commit_id_ranges[backend.alias]))

        expected_result = commit_id_range_result[backend.alias]
        response.mustcontain('{} commits'.format(len(expected_result)))
        for commit_id in expected_result:
            response.mustcontain(commit_id)

    commit_id = {
        'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
        'svn': '393',
        'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
    }

    commit_id_range = {
        'hg': (
            'a53d9201d4bc278910d416d94941b7ea007ecd52'
            '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
        'git': (
            '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
            '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
        'svn': '391...393',
    }

    commit_id_range_result = {
        'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
        'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
        'svn': ['391', '392', '393'],
    }

    diffs = {
        'hg': r"""diff --git a/README b/README
new file mode 120000
--- /dev/null
+++ b/README
@@ -0,0 +1,1 @@
+README.rst
\ No newline at end of file
""",
        'git': r"""diff --git a/README b/README
new file mode 120000
index 0000000..92cacd2
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+README.rst
\ No newline at end of file
""",
        'svn': """Index: README
===================================================================
diff --git a/README b/README
new file mode 10644
--- /dev/null\t(revision 0)
+++ b/README\t(revision 393)
@@ -0,0 +1 @@
+link README.rst
\\ No newline at end of file
""",
    }

    patches = {
        'hg': r"""# HG changeset patch
# User Marcin Kuzminski <marcin@python-works.com>
# Date 2014-01-07 12:21:40
# Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
# Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e

Added a symlink

""" + diffs['hg'],
        'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
From: Marcin Kuzminski <marcin@python-works.com>
Date: 2014-01-07 12:22:20
Subject: [PATCH] Added a symlink

---

""" + diffs['git'],
        'svn': r"""# SVN changeset patch
# User marcin
# Date 2014-09-02 12:25:22.071142
# Revision 393

Added a symlink

""" + diffs['svn'],
    }

    def _check_new_diff_menus(self, response, right_menu=False,):
        # individual file diff menus
        for elem in ['Show file before', 'Show file after']:
            response.mustcontain(elem)

        # right pane diff menus
        if right_menu:
            for elem in ['Hide whitespace changes', 'Toggle wide diff',
                         'Show full context diff']:
                response.mustcontain(elem)
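
In the file above the raw diff and patch downloads are compared for equality rather than membership. Equality between bytes and str does not raise on Python 3; it is simply always False, so response.body == self.diffs[...] would fail silently once the fixture strings are str literals, and comparing response.text keeps both sides as str. A short illustration with a plain literal standing in for the fixture:

diff_fixture = 'diff --git a/README b/README\n'   # stand-in for self.diffs[alias]

as_bytes = diff_fixture.encode('utf-8')           # roughly what response.body would hold
assert (as_bytes == diff_fixture) is False        # bytes vs str: quietly unequal, no error
assert as_bytes.decode('utf-8') == diff_fixture   # compare str to str, as response.text does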
@@ -1,672 +1,672 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import lxml.html
23 import lxml.html
24
24
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests import assert_session_flash
27 from rhodecode.tests.utils import AssertResponse, commit_change
27 from rhodecode.tests.utils import AssertResponse, commit_change
28
28
29
29
30 def route_path(name, params=None, **kwargs):
30 def route_path(name, params=None, **kwargs):
31 import urllib.request, urllib.parse, urllib.error
31 import urllib.request, urllib.parse, urllib.error
32
32
33 base_url = {
33 base_url = {
34 'repo_compare_select': '/{repo_name}/compare',
34 'repo_compare_select': '/{repo_name}/compare',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
35 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
36 }[name].format(**kwargs)
36 }[name].format(**kwargs)
37
37
38 if params:
38 if params:
39 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
39 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
40 return base_url
40 return base_url
41
41
42
42
43 @pytest.mark.usefixtures("autologin_user", "app")
43 @pytest.mark.usefixtures("autologin_user", "app")
44 class TestCompareView(object):
44 class TestCompareView(object):
45
45
46 def test_compare_index_is_reached_at_least_once(self, backend):
46 def test_compare_index_is_reached_at_least_once(self, backend):
47 repo = backend.repo
47 repo = backend.repo
48 self.app.get(
48 self.app.get(
49 route_path('repo_compare_select', repo_name=repo.repo_name))
49 route_path('repo_compare_select', repo_name=repo.repo_name))
50
50
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
51 @pytest.mark.xfail_backends("svn", reason="Requires pull")
52 def test_compare_remote_with_different_commit_indexes(self, backend):
52 def test_compare_remote_with_different_commit_indexes(self, backend):
53 # Preparing the following repository structure:
53 # Preparing the following repository structure:
54 #
54 #
55 # Origin repository has two commits:
55 # Origin repository has two commits:
56 #
56 #
57 # 0 1
57 # 0 1
58 # A -- D
58 # A -- D
59 #
59 #
60 # The fork of it has a few more commits and "D" has a commit index
60 # The fork of it has a few more commits and "D" has a commit index
61 # which does not exist in origin.
61 # which does not exist in origin.
62 #
62 #
63 # 0 1 2 3 4
63 # 0 1 2 3 4
64 # A -- -- -- D -- E
64 # A -- -- -- D -- E
65 # \- B -- C
65 # \- B -- C
66 #
66 #
67
67
68 fork = backend.create_repo()
68 fork = backend.create_repo()
69
69
70 # prepare fork
70 # prepare fork
71 commit0 = commit_change(
71 commit0 = commit_change(
72 fork.repo_name, filename='file1', content='A',
72 fork.repo_name, filename='file1', content='A',
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
73 message='A', vcs_type=backend.alias, parent=None, newfile=True)
74
74
75 commit1 = commit_change(
75 commit1 = commit_change(
76 fork.repo_name, filename='file1', content='B',
76 fork.repo_name, filename='file1', content='B',
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
77 message='B, child of A', vcs_type=backend.alias, parent=commit0)
78
78
79 commit_change( # commit 2
79 commit_change( # commit 2
80 fork.repo_name, filename='file1', content='C',
80 fork.repo_name, filename='file1', content='C',
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
81 message='C, child of B', vcs_type=backend.alias, parent=commit1)
82
82
83 commit3 = commit_change(
83 commit3 = commit_change(
84 fork.repo_name, filename='file1', content='D',
84 fork.repo_name, filename='file1', content='D',
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
85 message='D, child of A', vcs_type=backend.alias, parent=commit0)
86
86
87 commit4 = commit_change(
87 commit4 = commit_change(
88 fork.repo_name, filename='file1', content='E',
88 fork.repo_name, filename='file1', content='E',
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
89 message='E, child of D', vcs_type=backend.alias, parent=commit3)
90
90
91 # prepare origin repository, taking just the history up to D
91 # prepare origin repository, taking just the history up to D
92 origin = backend.create_repo()
92 origin = backend.create_repo()
93
93
94 origin_repo = origin.scm_instance(cache=False)
94 origin_repo = origin.scm_instance(cache=False)
95 origin_repo.config.clear_section('hooks')
95 origin_repo.config.clear_section('hooks')
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
96 origin_repo.pull(fork.repo_full_path, commit_ids=[commit3.raw_id])
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
97 origin_repo = origin.scm_instance(cache=False) # cache rebuild
98
98
99 # Verify test fixture setup
99 # Verify test fixture setup
100 # This does not work for git
100 # This does not work for git
101 if backend.alias != 'git':
101 if backend.alias != 'git':
102 assert 5 == len(fork.scm_instance().commit_ids)
102 assert 5 == len(fork.scm_instance().commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
103 assert 2 == len(origin_repo.commit_ids)
104
104
105 # Comparing the revisions
105 # Comparing the revisions
106 response = self.app.get(
106 response = self.app.get(
107 route_path('repo_compare',
107 route_path('repo_compare',
108 repo_name=origin.repo_name,
108 repo_name=origin.repo_name,
109 source_ref_type="rev", source_ref=commit3.raw_id,
109 source_ref_type="rev", source_ref=commit3.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
110 target_ref_type="rev", target_ref=commit4.raw_id,
111 params=dict(merge='1', target_repo=fork.repo_name)
111 params=dict(merge='1', target_repo=fork.repo_name)
112 ))
112 ))
113
113
114 compare_page = ComparePage(response)
114 compare_page = ComparePage(response)
115 compare_page.contains_commits([commit4])
115 compare_page.contains_commits([commit4])
116
116
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
117 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
118 def test_compare_forks_on_branch_extra_commits(self, backend):
118 def test_compare_forks_on_branch_extra_commits(self, backend):
119 repo1 = backend.create_repo()
119 repo1 = backend.create_repo()
120
120
121 # commit something !
121 # commit something !
122 commit0 = commit_change(
122 commit0 = commit_change(
123 repo1.repo_name, filename='file1', content='line1\n',
123 repo1.repo_name, filename='file1', content='line1\n',
124 message='commit1', vcs_type=backend.alias, parent=None,
124 message='commit1', vcs_type=backend.alias, parent=None,
125 newfile=True)
125 newfile=True)
126
126
127 # fork this repo
127 # fork this repo
128 repo2 = backend.create_fork()
128 repo2 = backend.create_fork()
129
129
130 # add two extra commit into fork
130 # add two extra commit into fork
131 commit1 = commit_change(
131 commit1 = commit_change(
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
132 repo2.repo_name, filename='file1', content='line1\nline2\n',
133 message='commit2', vcs_type=backend.alias, parent=commit0)
133 message='commit2', vcs_type=backend.alias, parent=commit0)
134
134
135 commit2 = commit_change(
135 commit2 = commit_change(
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
136 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
137 message='commit3', vcs_type=backend.alias, parent=commit1)
137 message='commit3', vcs_type=backend.alias, parent=commit1)
138
138
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
139 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
140 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
141
141
142 response = self.app.get(
142 response = self.app.get(
143 route_path('repo_compare',
143 route_path('repo_compare',
144 repo_name=repo1.repo_name,
144 repo_name=repo1.repo_name,
145 source_ref_type="branch", source_ref=commit_id2,
145 source_ref_type="branch", source_ref=commit_id2,
146 target_ref_type="branch", target_ref=commit_id1,
146 target_ref_type="branch", target_ref=commit_id1,
147 params=dict(merge='1', target_repo=repo2.repo_name)
147 params=dict(merge='1', target_repo=repo2.repo_name)
148 ))
148 ))
149
149
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
150 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
151 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
152
152
153 compare_page = ComparePage(response)
153 compare_page = ComparePage(response)
154 compare_page.contains_change_summary(1, 2, 0)
154 compare_page.contains_change_summary(1, 2, 0)
155 compare_page.contains_commits([commit1, commit2])
155 compare_page.contains_commits([commit1, commit2])
156
156
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
157 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
158 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
159
159
160 # Swap is removed when comparing branches since it's a PR feature and
160 # Swap is removed when comparing branches since it's a PR feature and
161 # it is then a preview mode
161 # it is then a preview mode
162 compare_page.swap_is_hidden()
162 compare_page.swap_is_hidden()
163 compare_page.target_source_are_disabled()
163 compare_page.target_source_are_disabled()
164
164
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
165 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
166 def test_compare_forks_on_branch_extra_commits_origin_has_incomming(self, backend):
167 repo1 = backend.create_repo()
167 repo1 = backend.create_repo()
168
168
169 # commit something !
169 # commit something !
170 commit0 = commit_change(
170 commit0 = commit_change(
171 repo1.repo_name, filename='file1', content='line1\n',
171 repo1.repo_name, filename='file1', content='line1\n',
172 message='commit1', vcs_type=backend.alias, parent=None,
172 message='commit1', vcs_type=backend.alias, parent=None,
173 newfile=True)
173 newfile=True)
174
174
175 # fork this repo
175 # fork this repo
176 repo2 = backend.create_fork()
176 repo2 = backend.create_fork()
177
177
178 # now commit something to origin repo
178 # now commit something to origin repo
179 commit_change(
179 commit_change(
180 repo1.repo_name, filename='file2', content='line1file2\n',
180 repo1.repo_name, filename='file2', content='line1file2\n',
181 message='commit2', vcs_type=backend.alias, parent=commit0,
181 message='commit2', vcs_type=backend.alias, parent=commit0,
182 newfile=True)
182 newfile=True)
183
183
184 # add two extra commit into fork
184 # add two extra commit into fork
185 commit1 = commit_change(
185 commit1 = commit_change(
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
186 repo2.repo_name, filename='file1', content='line1\nline2\n',
187 message='commit2', vcs_type=backend.alias, parent=commit0)
187 message='commit2', vcs_type=backend.alias, parent=commit0)
188
188
189 commit2 = commit_change(
189 commit2 = commit_change(
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
190 repo2.repo_name, filename='file1', content='line1\nline2\nline3\n',
191 message='commit3', vcs_type=backend.alias, parent=commit1)
191 message='commit3', vcs_type=backend.alias, parent=commit1)
192
192
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
193 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
194 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
195
195
196 response = self.app.get(
196 response = self.app.get(
197 route_path('repo_compare',
197 route_path('repo_compare',
198 repo_name=repo1.repo_name,
198 repo_name=repo1.repo_name,
199 source_ref_type="branch", source_ref=commit_id2,
199 source_ref_type="branch", source_ref=commit_id2,
200 target_ref_type="branch", target_ref=commit_id1,
200 target_ref_type="branch", target_ref=commit_id1,
201 params=dict(merge='1', target_repo=repo2.repo_name),
201 params=dict(merge='1', target_repo=repo2.repo_name),
202 ))
202 ))
203
203
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
204 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id2))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
205 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id1))
206
206
207 compare_page = ComparePage(response)
207 compare_page = ComparePage(response)
208 compare_page.contains_change_summary(1, 2, 0)
208 compare_page.contains_change_summary(1, 2, 0)
209 compare_page.contains_commits([commit1, commit2])
209 compare_page.contains_commits([commit1, commit2])
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
210 anchor = 'a_c-{}-826e8142e6ba'.format(commit0.short_id)
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
211 compare_page.contains_file_links_and_anchors([('file1', anchor), ])
212
212
213 # Swap is removed when comparing branches since it's a PR feature,
213 # Swap is removed when comparing branches since it's a PR feature,
214 # and the compare then acts as a preview
214 # and the compare then acts as a preview
215 compare_page.swap_is_hidden()
215 compare_page.swap_is_hidden()
216 compare_page.target_source_are_disabled()
216 compare_page.target_source_are_disabled()
217
217
218 @pytest.mark.xfail_backends("svn")
218 @pytest.mark.xfail_backends("svn")
219 # TODO(marcink): no svn support for comparing two separate repos
219 # TODO(marcink): no svn support for comparing two separate repos
220 def test_compare_of_unrelated_forks(self, backend):
220 def test_compare_of_unrelated_forks(self, backend):
221 orig = backend.create_repo(number_of_commits=1)
221 orig = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
222 fork = backend.create_repo(number_of_commits=1)
223
223
224 response = self.app.get(
224 response = self.app.get(
225 route_path('repo_compare',
225 route_path('repo_compare',
226 repo_name=orig.repo_name,
226 repo_name=orig.repo_name,
227 source_ref_type="rev", source_ref="tip",
227 source_ref_type="rev", source_ref="tip",
228 target_ref_type="rev", target_ref="tip",
228 target_ref_type="rev", target_ref="tip",
229 params=dict(merge='1', target_repo=fork.repo_name),
229 params=dict(merge='1', target_repo=fork.repo_name),
230 ),
230 ),
231 status=302)
231 status=302)
232 response = response.follow()
232 response = response.follow()
233 response.mustcontain("Repositories unrelated.")
233 response.mustcontain("Repositories unrelated.")
234
234
235 @pytest.mark.xfail_backends("svn")
235 @pytest.mark.xfail_backends("svn")
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
236 def test_compare_cherry_pick_commits_from_bottom(self, backend):
237
237
238 # repo1:
238 # repo1:
239 # commit0:
239 # commit0:
240 # commit1:
240 # commit1:
241 # repo1-fork - in which we will cherry-pick the bottom commits
241 # repo1-fork - in which we will cherry-pick the bottom commits
242 # commit0:
242 # commit0:
243 # commit1:
243 # commit1:
244 # commit2: x
244 # commit2: x
245 # commit3: x
245 # commit3: x
246 # commit4: x
246 # commit4: x
247 # commit5:
247 # commit5:
248 # make repo1, and commit1+commit2
248 # make repo1, and commit1+commit2
249
249
250 repo1 = backend.create_repo()
250 repo1 = backend.create_repo()
251
251
252 # commit something !
252 # commit something !
253 commit0 = commit_change(
253 commit0 = commit_change(
254 repo1.repo_name, filename='file1', content='line1\n',
254 repo1.repo_name, filename='file1', content='line1\n',
255 message='commit1', vcs_type=backend.alias, parent=None,
255 message='commit1', vcs_type=backend.alias, parent=None,
256 newfile=True)
256 newfile=True)
257 commit1 = commit_change(
257 commit1 = commit_change(
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
258 repo1.repo_name, filename='file1', content='line1\nline2\n',
259 message='commit2', vcs_type=backend.alias, parent=commit0)
259 message='commit2', vcs_type=backend.alias, parent=commit0)
260
260
261 # fork this repo
261 # fork this repo
262 repo2 = backend.create_fork()
262 repo2 = backend.create_fork()
263
263
264 # now make commit3-6
264 # now make commit3-6
265 commit2 = commit_change(
265 commit2 = commit_change(
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
266 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
267 message='commit3', vcs_type=backend.alias, parent=commit1)
267 message='commit3', vcs_type=backend.alias, parent=commit1)
268 commit3 = commit_change(
268 commit3 = commit_change(
269 repo1.repo_name, filename='file1',
269 repo1.repo_name, filename='file1',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
270 content='line1\nline2\nline3\nline4\n', message='commit4',
271 vcs_type=backend.alias, parent=commit2)
271 vcs_type=backend.alias, parent=commit2)
272 commit4 = commit_change(
272 commit4 = commit_change(
273 repo1.repo_name, filename='file1',
273 repo1.repo_name, filename='file1',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
274 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
275 vcs_type=backend.alias, parent=commit3)
275 vcs_type=backend.alias, parent=commit3)
276 commit_change( # commit 5
276 commit_change( # commit 5
277 repo1.repo_name, filename='file1',
277 repo1.repo_name, filename='file1',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
278 content='line1\nline2\nline3\nline4\nline5\nline6\n',
279 message='commit6', vcs_type=backend.alias, parent=commit4)
279 message='commit6', vcs_type=backend.alias, parent=commit4)
280
280
281 response = self.app.get(
281 response = self.app.get(
282 route_path('repo_compare',
282 route_path('repo_compare',
283 repo_name=repo2.repo_name,
283 repo_name=repo2.repo_name,
284 # parent of commit2, in target repo2
284 # parent of commit2, in target repo2
285 source_ref_type="rev", source_ref=commit1.raw_id,
285 source_ref_type="rev", source_ref=commit1.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
286 target_ref_type="rev", target_ref=commit4.raw_id,
287 params=dict(merge='1', target_repo=repo1.repo_name),
287 params=dict(merge='1', target_repo=repo1.repo_name),
288 ))
288 ))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
289 response.mustcontain('%s@%s' % (repo2.repo_name, commit1.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
290 response.mustcontain('%s@%s' % (repo1.repo_name, commit4.short_id))
291
291
292 # files
292 # files
293 compare_page = ComparePage(response)
293 compare_page = ComparePage(response)
294 compare_page.contains_change_summary(1, 3, 0)
294 compare_page.contains_change_summary(1, 3, 0)
295 compare_page.contains_commits([commit2, commit3, commit4])
295 compare_page.contains_commits([commit2, commit3, commit4])
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
296 anchor = 'a_c-{}-826e8142e6ba'.format(commit1.short_id)
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
297 compare_page.contains_file_links_and_anchors([('file1', anchor),])
298
298
299 @pytest.mark.xfail_backends("svn")
299 @pytest.mark.xfail_backends("svn")
300 def test_compare_cherry_pick_commits_from_top(self, backend):
300 def test_compare_cherry_pick_commits_from_top(self, backend):
301 # repo1:
301 # repo1:
302 # commit0:
302 # commit0:
303 # commit1:
303 # commit1:
304 # repo1-fork - in which we will cherry-pick the top commits
304 # repo1-fork - in which we will cherry-pick the top commits
305 # commit0:
305 # commit0:
306 # commit1:
306 # commit1:
307 # commit2:
307 # commit2:
308 # commit3: x
308 # commit3: x
309 # commit4: x
309 # commit4: x
310 # commit5: x
310 # commit5: x
311
311
312 # make repo1, and commit1+commit2
312 # make repo1, and commit1+commit2
313 repo1 = backend.create_repo()
313 repo1 = backend.create_repo()
314
314
315 # commit something !
315 # commit something !
316 commit0 = commit_change(
316 commit0 = commit_change(
317 repo1.repo_name, filename='file1', content='line1\n',
317 repo1.repo_name, filename='file1', content='line1\n',
318 message='commit1', vcs_type=backend.alias, parent=None,
318 message='commit1', vcs_type=backend.alias, parent=None,
319 newfile=True)
319 newfile=True)
320 commit1 = commit_change(
320 commit1 = commit_change(
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
321 repo1.repo_name, filename='file1', content='line1\nline2\n',
322 message='commit2', vcs_type=backend.alias, parent=commit0)
322 message='commit2', vcs_type=backend.alias, parent=commit0)
323
323
324 # fork this repo
324 # fork this repo
325 backend.create_fork()
325 backend.create_fork()
326
326
327 # now make commit3-6
327 # now make commit3-6
328 commit2 = commit_change(
328 commit2 = commit_change(
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
329 repo1.repo_name, filename='file1', content='line1\nline2\nline3\n',
330 message='commit3', vcs_type=backend.alias, parent=commit1)
330 message='commit3', vcs_type=backend.alias, parent=commit1)
331 commit3 = commit_change(
331 commit3 = commit_change(
332 repo1.repo_name, filename='file1',
332 repo1.repo_name, filename='file1',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
333 content='line1\nline2\nline3\nline4\n', message='commit4',
334 vcs_type=backend.alias, parent=commit2)
334 vcs_type=backend.alias, parent=commit2)
335 commit4 = commit_change(
335 commit4 = commit_change(
336 repo1.repo_name, filename='file1',
336 repo1.repo_name, filename='file1',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
337 content='line1\nline2\nline3\nline4\nline5\n', message='commit5',
338 vcs_type=backend.alias, parent=commit3)
338 vcs_type=backend.alias, parent=commit3)
339 commit5 = commit_change(
339 commit5 = commit_change(
340 repo1.repo_name, filename='file1',
340 repo1.repo_name, filename='file1',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
341 content='line1\nline2\nline3\nline4\nline5\nline6\n',
342 message='commit6', vcs_type=backend.alias, parent=commit4)
342 message='commit6', vcs_type=backend.alias, parent=commit4)
343
343
344 response = self.app.get(
344 response = self.app.get(
345 route_path('repo_compare',
345 route_path('repo_compare',
346 repo_name=repo1.repo_name,
346 repo_name=repo1.repo_name,
347 # parent of commit3, not in source repo2
347 # parent of commit3, not in source repo2
348 source_ref_type="rev", source_ref=commit2.raw_id,
348 source_ref_type="rev", source_ref=commit2.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
349 target_ref_type="rev", target_ref=commit5.raw_id,
350 params=dict(merge='1'),))
350 params=dict(merge='1'),))
351
351
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
352 response.mustcontain('%s@%s' % (repo1.repo_name, commit2.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
353 response.mustcontain('%s@%s' % (repo1.repo_name, commit5.short_id))
354
354
355 compare_page = ComparePage(response)
355 compare_page = ComparePage(response)
356 compare_page.contains_change_summary(1, 3, 0)
356 compare_page.contains_change_summary(1, 3, 0)
357 compare_page.contains_commits([commit3, commit4, commit5])
357 compare_page.contains_commits([commit3, commit4, commit5])
358
358
359 # files
359 # files
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
360 anchor = 'a_c-{}-826e8142e6ba'.format(commit2.short_id)
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
361 compare_page.contains_file_links_and_anchors([('file1', anchor),])
362
362
363 @pytest.mark.xfail_backends("svn")
363 @pytest.mark.xfail_backends("svn")
364 def test_compare_remote_branches(self, backend):
364 def test_compare_remote_branches(self, backend):
365 repo1 = backend.repo
365 repo1 = backend.repo
366 repo2 = backend.create_fork()
366 repo2 = backend.create_fork()
367
367
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
368 commit_id1 = repo1.get_commit(commit_idx=3).raw_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
369 commit_id1_short = repo1.get_commit(commit_idx=3).short_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
370 commit_id2 = repo1.get_commit(commit_idx=6).raw_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
371 commit_id2_short = repo1.get_commit(commit_idx=6).short_id
372
372
373 response = self.app.get(
373 response = self.app.get(
374 route_path('repo_compare',
374 route_path('repo_compare',
375 repo_name=repo1.repo_name,
375 repo_name=repo1.repo_name,
376 source_ref_type="rev", source_ref=commit_id1,
376 source_ref_type="rev", source_ref=commit_id1,
377 target_ref_type="rev", target_ref=commit_id2,
377 target_ref_type="rev", target_ref=commit_id2,
378 params=dict(merge='1', target_repo=repo2.repo_name),
378 params=dict(merge='1', target_repo=repo2.repo_name),
379 ))
379 ))
380
380
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
381 response.mustcontain('%s@%s' % (repo1.repo_name, commit_id1))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
382 response.mustcontain('%s@%s' % (repo2.repo_name, commit_id2))
383
383
384 compare_page = ComparePage(response)
384 compare_page = ComparePage(response)
385
385
386 # outgoing commits between those commits
386 # outgoing commits between those commits
387 compare_page.contains_commits(
387 compare_page.contains_commits(
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
388 [repo2.get_commit(commit_idx=x) for x in [4, 5, 6]])
389
389
390 # files
390 # files
391 compare_page.contains_file_links_and_anchors([
391 compare_page.contains_file_links_and_anchors([
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
392 ('vcs/backends/hg.py', 'a_c-{}-9c390eb52cd6'.format(commit_id2_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
393 ('vcs/backends/__init__.py', 'a_c-{}-41b41c1f2796'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
394 ('vcs/backends/base.py', 'a_c-{}-2f574d260608'.format(commit_id1_short)),
395 ])
395 ])
396
396
397 @pytest.mark.xfail_backends("svn")
397 @pytest.mark.xfail_backends("svn")
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
398 def test_source_repo_new_commits_after_forking_simple_diff(self, backend):
399 repo1 = backend.create_repo()
399 repo1 = backend.create_repo()
400 r1_name = repo1.repo_name
400 r1_name = repo1.repo_name
401
401
402 commit0 = commit_change(
402 commit0 = commit_change(
403 repo=r1_name, filename='file1',
403 repo=r1_name, filename='file1',
404 content='line1', message='commit1', vcs_type=backend.alias,
404 content='line1', message='commit1', vcs_type=backend.alias,
405 newfile=True)
405 newfile=True)
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
406 assert repo1.scm_instance().commit_ids == [commit0.raw_id]
407
407
408 # fork the repo1
408 # fork the repo1
409 repo2 = backend.create_fork()
409 repo2 = backend.create_fork()
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
410 assert repo2.scm_instance().commit_ids == [commit0.raw_id]
411
411
412 self.r2_id = repo2.repo_id
412 self.r2_id = repo2.repo_id
413 r2_name = repo2.repo_name
413 r2_name = repo2.repo_name
414
414
415 commit1 = commit_change(
415 commit1 = commit_change(
416 repo=r2_name, filename='file1-fork',
416 repo=r2_name, filename='file1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
417 content='file1-line1-from-fork', message='commit1-fork',
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
418 vcs_type=backend.alias, parent=repo2.scm_instance()[-1],
419 newfile=True)
419 newfile=True)
420
420
421 commit2 = commit_change(
421 commit2 = commit_change(
422 repo=r2_name, filename='file2-fork',
422 repo=r2_name, filename='file2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
423 content='file2-line1-from-fork', message='commit2-fork',
424 vcs_type=backend.alias, parent=commit1,
424 vcs_type=backend.alias, parent=commit1,
425 newfile=True)
425 newfile=True)
426
426
427 commit_change( # commit 3
427 commit_change( # commit 3
428 repo=r2_name, filename='file3-fork',
428 repo=r2_name, filename='file3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
429 content='file3-line1-from-fork', message='commit3-fork',
430 vcs_type=backend.alias, parent=commit2, newfile=True)
430 vcs_type=backend.alias, parent=commit2, newfile=True)
431
431
432 # compare !
432 # compare !
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
433 commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
434 commit_id2 = repo2.scm_instance().DEFAULT_BRANCH_NAME
435
435
436 response = self.app.get(
436 response = self.app.get(
437 route_path('repo_compare',
437 route_path('repo_compare',
438 repo_name=r2_name,
438 repo_name=r2_name,
439 source_ref_type="branch", source_ref=commit_id1,
439 source_ref_type="branch", source_ref=commit_id1,
440 target_ref_type="branch", target_ref=commit_id2,
440 target_ref_type="branch", target_ref=commit_id2,
441 params=dict(merge='1', target_repo=r1_name),
441 params=dict(merge='1', target_repo=r1_name),
442 ))
442 ))
443
443
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
444 response.mustcontain('%s@%s' % (r2_name, commit_id1))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
445 response.mustcontain('%s@%s' % (r1_name, commit_id2))
446 response.mustcontain('No files')
446 response.mustcontain('No files')
447 response.mustcontain('No commits in this compare')
447 response.mustcontain('No commits in this compare')
448
448
449 commit0 = commit_change(
449 commit0 = commit_change(
450 repo=r1_name, filename='file2',
450 repo=r1_name, filename='file2',
451 content='line1-added-after-fork', message='commit2-parent',
451 content='line1-added-after-fork', message='commit2-parent',
452 vcs_type=backend.alias, parent=None, newfile=True)
452 vcs_type=backend.alias, parent=None, newfile=True)
453
453
454 # compare !
454 # compare !
455 response = self.app.get(
455 response = self.app.get(
456 route_path('repo_compare',
456 route_path('repo_compare',
457 repo_name=r2_name,
457 repo_name=r2_name,
458 source_ref_type="branch", source_ref=commit_id1,
458 source_ref_type="branch", source_ref=commit_id1,
459 target_ref_type="branch", target_ref=commit_id2,
459 target_ref_type="branch", target_ref=commit_id2,
460 params=dict(merge='1', target_repo=r1_name),
460 params=dict(merge='1', target_repo=r1_name),
461 ))
461 ))
462
462
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
463 response.mustcontain('%s@%s' % (r2_name, commit_id1))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
464 response.mustcontain('%s@%s' % (r1_name, commit_id2))
465
465
466 response.mustcontain("""commit2-parent""")
466 response.mustcontain("""commit2-parent""")
467 response.mustcontain("""line1-added-after-fork""")
467 response.mustcontain("""line1-added-after-fork""")
468 compare_page = ComparePage(response)
468 compare_page = ComparePage(response)
469 compare_page.contains_change_summary(1, 1, 0)
469 compare_page.contains_change_summary(1, 1, 0)
470
470
471 @pytest.mark.xfail_backends("svn")
471 @pytest.mark.xfail_backends("svn")
472 def test_compare_commits(self, backend, xhr_header):
472 def test_compare_commits(self, backend, xhr_header):
473 commit0 = backend.repo.get_commit(commit_idx=0)
473 commit0 = backend.repo.get_commit(commit_idx=0)
474 commit1 = backend.repo.get_commit(commit_idx=1)
474 commit1 = backend.repo.get_commit(commit_idx=1)
475
475
476 response = self.app.get(
476 response = self.app.get(
477 route_path('repo_compare',
477 route_path('repo_compare',
478 repo_name=backend.repo_name,
478 repo_name=backend.repo_name,
479 source_ref_type="rev", source_ref=commit0.raw_id,
479 source_ref_type="rev", source_ref=commit0.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
480 target_ref_type="rev", target_ref=commit1.raw_id,
481 params=dict(merge='1')
481 params=dict(merge='1')
482 ),
482 ),
483 extra_environ=xhr_header, )
483 extra_environ=xhr_header, )
484
484
485 # outgoing commits between those commits
485 # outgoing commits between those commits
486 compare_page = ComparePage(response)
486 compare_page = ComparePage(response)
487 compare_page.contains_commits(commits=[commit1])
487 compare_page.contains_commits(commits=[commit1])
488
488
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
489 def test_errors_when_comparing_unknown_source_repo(self, backend):
490 repo = backend.repo
490 repo = backend.repo
491 badrepo = 'badrepo'
491 badrepo = 'badrepo'
492
492
493 response = self.app.get(
493 response = self.app.get(
494 route_path('repo_compare',
494 route_path('repo_compare',
495 repo_name=badrepo,
495 repo_name=badrepo,
496 source_ref_type="rev", source_ref='tip',
496 source_ref_type="rev", source_ref='tip',
497 target_ref_type="rev", target_ref='tip',
497 target_ref_type="rev", target_ref='tip',
498 params=dict(merge='1', target_repo=repo.repo_name)
498 params=dict(merge='1', target_repo=repo.repo_name)
499 ),
499 ),
500 status=404)
500 status=404)
501
501
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
502 def test_errors_when_comparing_unknown_target_repo(self, backend):
503 repo = backend.repo
503 repo = backend.repo
504 badrepo = 'badrepo'
504 badrepo = 'badrepo'
505
505
506 response = self.app.get(
506 response = self.app.get(
507 route_path('repo_compare',
507 route_path('repo_compare',
508 repo_name=repo.repo_name,
508 repo_name=repo.repo_name,
509 source_ref_type="rev", source_ref='tip',
509 source_ref_type="rev", source_ref='tip',
510 target_ref_type="rev", target_ref='tip',
510 target_ref_type="rev", target_ref='tip',
511 params=dict(merge='1', target_repo=badrepo),
511 params=dict(merge='1', target_repo=badrepo),
512 ),
512 ),
513 status=302)
513 status=302)
514 redirected = response.follow()
514 redirected = response.follow()
515 redirected.mustcontain(
515 redirected.mustcontain(
516 'Could not find the target repo: `{}`'.format(badrepo))
516 'Could not find the target repo: `{}`'.format(badrepo))
517
517
518 def test_compare_not_in_preview_mode(self, backend_stub):
518 def test_compare_not_in_preview_mode(self, backend_stub):
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
519 commit0 = backend_stub.repo.get_commit(commit_idx=0)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
520 commit1 = backend_stub.repo.get_commit(commit_idx=1)
521
521
522 response = self.app.get(
522 response = self.app.get(
523 route_path('repo_compare',
523 route_path('repo_compare',
524 repo_name=backend_stub.repo_name,
524 repo_name=backend_stub.repo_name,
525 source_ref_type="rev", source_ref=commit0.raw_id,
525 source_ref_type="rev", source_ref=commit0.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
526 target_ref_type="rev", target_ref=commit1.raw_id,
527 ))
527 ))
528
528
529 # outgoing commits between those commits
529 # outgoing commits between those commits
530 compare_page = ComparePage(response)
530 compare_page = ComparePage(response)
531 compare_page.swap_is_visible()
531 compare_page.swap_is_visible()
532 compare_page.target_source_are_enabled()
532 compare_page.target_source_are_enabled()
533
533
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
534 def test_compare_of_fork_with_largefiles(self, backend_hg, settings_util):
535 orig = backend_hg.create_repo(number_of_commits=1)
535 orig = backend_hg.create_repo(number_of_commits=1)
536 fork = backend_hg.create_fork()
536 fork = backend_hg.create_fork()
537
537
538 settings_util.create_repo_rhodecode_ui(
538 settings_util.create_repo_rhodecode_ui(
539 orig, 'extensions', value='', key='largefiles', active=False)
539 orig, 'extensions', value='', key='largefiles', active=False)
540 settings_util.create_repo_rhodecode_ui(
540 settings_util.create_repo_rhodecode_ui(
541 fork, 'extensions', value='', key='largefiles', active=True)
541 fork, 'extensions', value='', key='largefiles', active=True)
542
542
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
543 compare_module = ('rhodecode.lib.vcs.backends.hg.repository.'
544 'MercurialRepository.compare')
544 'MercurialRepository.compare')
545 with mock.patch(compare_module) as compare_mock:
545 with mock.patch(compare_module) as compare_mock:
546 compare_mock.side_effect = RepositoryRequirementError()
546 compare_mock.side_effect = RepositoryRequirementError()
547
547
548 response = self.app.get(
548 response = self.app.get(
549 route_path('repo_compare',
549 route_path('repo_compare',
550 repo_name=orig.repo_name,
550 repo_name=orig.repo_name,
551 source_ref_type="rev", source_ref="tip",
551 source_ref_type="rev", source_ref="tip",
552 target_ref_type="rev", target_ref="tip",
552 target_ref_type="rev", target_ref="tip",
553 params=dict(merge='1', target_repo=fork.repo_name),
553 params=dict(merge='1', target_repo=fork.repo_name),
554 ),
554 ),
555 status=302)
555 status=302)
556
556
557 assert_session_flash(
557 assert_session_flash(
558 response,
558 response,
559 'Could not compare repos with different large file settings')
559 'Could not compare repos with different large file settings')
560
560
561
561
562 @pytest.mark.usefixtures("autologin_user")
562 @pytest.mark.usefixtures("autologin_user")
563 class TestCompareControllerSvn(object):
563 class TestCompareControllerSvn(object):
564
564
565 def test_supports_references_with_path(self, app, backend_svn):
565 def test_supports_references_with_path(self, app, backend_svn):
566 repo = backend_svn['svn-simple-layout']
566 repo = backend_svn['svn-simple-layout']
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
567 commit_id = repo.get_commit(commit_idx=-1).raw_id
568 response = app.get(
568 response = app.get(
569 route_path('repo_compare',
569 route_path('repo_compare',
570 repo_name=repo.repo_name,
570 repo_name=repo.repo_name,
571 source_ref_type="tag",
571 source_ref_type="tag",
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
572 source_ref="%s@%s" % ('tags/v0.1', commit_id),
573 target_ref_type="tag",
573 target_ref_type="tag",
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
574 target_ref="%s@%s" % ('tags/v0.2', commit_id),
575 params=dict(merge='1'),
575 params=dict(merge='1'),
576 ),
576 ),
577 status=200)
577 status=200)
578
578
579 # Expecting no commits, since both paths are at the same revision
579 # Expecting no commits, since both paths are at the same revision
580 response.mustcontain('No commits in this compare')
580 response.mustcontain('No commits in this compare')
581
581
582 # Should find only one file changed when comparing those two tags
582 # Should find only one file changed when comparing those two tags
583 response.mustcontain('example.py')
583 response.mustcontain('example.py')
584 compare_page = ComparePage(response)
584 compare_page = ComparePage(response)
585 compare_page.contains_change_summary(1, 5, 1)
585 compare_page.contains_change_summary(1, 5, 1)
586
586
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
587 def test_shows_commits_if_different_ids(self, app, backend_svn):
588 repo = backend_svn['svn-simple-layout']
588 repo = backend_svn['svn-simple-layout']
589 source_id = repo.get_commit(commit_idx=-6).raw_id
589 source_id = repo.get_commit(commit_idx=-6).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
590 target_id = repo.get_commit(commit_idx=-1).raw_id
591 response = app.get(
591 response = app.get(
592 route_path('repo_compare',
592 route_path('repo_compare',
593 repo_name=repo.repo_name,
593 repo_name=repo.repo_name,
594 source_ref_type="tag",
594 source_ref_type="tag",
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
595 source_ref="%s@%s" % ('tags/v0.1', source_id),
596 target_ref_type="tag",
596 target_ref_type="tag",
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
597 target_ref="%s@%s" % ('tags/v0.2', target_id),
598 params=dict(merge='1')
598 params=dict(merge='1')
599 ),
599 ),
600 status=200)
600 status=200)
601
601
602 # It should show commits
602 # It should show commits
603 assert 'No commits in this compare' not in response.body
603 assert 'No commits in this compare' not in response.text
604
604
605 # Should find only one file changed when comparing those two tags
605 # Should find only one file changed when comparing those two tags
606 response.mustcontain('example.py')
606 response.mustcontain('example.py')
607 compare_page = ComparePage(response)
607 compare_page = ComparePage(response)
608 compare_page.contains_change_summary(1, 5, 1)
608 compare_page.contains_change_summary(1, 5, 1)
609
609
610
610
611 class ComparePage(AssertResponse):
611 class ComparePage(AssertResponse):
612 """
612 """
613 Abstracts the page template from the tests
613 Abstracts the page template from the tests
614 """
614 """
615
615
616 def contains_file_links_and_anchors(self, files):
616 def contains_file_links_and_anchors(self, files):
617 doc = lxml.html.fromstring(self.response.body)
617 doc = lxml.html.fromstring(self.response.body)
618 for filename, file_id in files:
618 for filename, file_id in files:
619 self.contains_one_anchor(file_id)
619 self.contains_one_anchor(file_id)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
620 diffblock = doc.cssselect('[data-f-path="%s"]' % filename)
621 assert len(diffblock) == 2
621 assert len(diffblock) == 2
622 for lnk in diffblock[0].cssselect('a'):
622 for lnk in diffblock[0].cssselect('a'):
623 if 'permalink' in lnk.text:
623 if 'permalink' in lnk.text:
624 assert '#{}'.format(file_id) in lnk.attrib['href']
624 assert '#{}'.format(file_id) in lnk.attrib['href']
625 break
625 break
626 else:
626 else:
627 pytest.fail('Unable to find permalink')
627 pytest.fail('Unable to find permalink')
628
628
629 def contains_change_summary(self, files_changed, inserted, deleted):
629 def contains_change_summary(self, files_changed, inserted, deleted):
630 template = (
630 template = (
631 '{files_changed} file{plural} changed: '
631 '{files_changed} file{plural} changed: '
632 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
632 '<span class="op-added">{inserted} inserted</span>, <span class="op-deleted">{deleted} deleted</span>')
633 self.response.mustcontain(template.format(
633 self.response.mustcontain(template.format(
634 files_changed=files_changed,
634 files_changed=files_changed,
635 plural="s" if files_changed > 1 else "",
635 plural="s" if files_changed > 1 else "",
636 inserted=inserted,
636 inserted=inserted,
637 deleted=deleted))
637 deleted=deleted))
638
638
639 def contains_commits(self, commits, ancestors=None):
639 def contains_commits(self, commits, ancestors=None):
640 response = self.response
640 response = self.response
641
641
642 for commit in commits:
642 for commit in commits:
643 # Expecting to see the commit message in an element which
643 # Expecting to see the commit message in an element which
644 # has the ID "c-{commit.raw_id}"
644 # has the ID "c-{commit.raw_id}"
645 self.element_contains('#c-' + commit.raw_id, commit.message)
645 self.element_contains('#c-' + commit.raw_id, commit.message)
646 self.contains_one_link(
646 self.contains_one_link(
647 'r%s:%s' % (commit.idx, commit.short_id),
647 'r%s:%s' % (commit.idx, commit.short_id),
648 self._commit_url(commit))
648 self._commit_url(commit))
649
649
650 if ancestors:
650 if ancestors:
651 response.mustcontain('Ancestor')
651 response.mustcontain('Ancestor')
652 for ancestor in ancestors:
652 for ancestor in ancestors:
653 self.contains_one_link(
653 self.contains_one_link(
654 ancestor.short_id, self._commit_url(ancestor))
654 ancestor.short_id, self._commit_url(ancestor))
655
655
656 def _commit_url(self, commit):
656 def _commit_url(self, commit):
657 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
657 return '/%s/changeset/%s' % (commit.repository.name, commit.raw_id)
658
658
659 def swap_is_hidden(self):
659 def swap_is_hidden(self):
660 assert '<a id="btn-swap"' not in self.response.text
660 assert '<a id="btn-swap"' not in self.response.text
661
661
662 def swap_is_visible(self):
662 def swap_is_visible(self):
663 assert '<a id="btn-swap"' in self.response.text
663 assert '<a id="btn-swap"' in self.response.text
664
664
665 def target_source_are_disabled(self):
665 def target_source_are_disabled(self):
666 response = self.response
666 response = self.response
667 response.mustcontain("var enable_fields = false;")
667 response.mustcontain("var enable_fields = false;")
668 response.mustcontain('.select2("enable", enable_fields)')
668 response.mustcontain('.select2("enable", enable_fields)')
669
669
670 def target_source_are_enabled(self):
670 def target_source_are_enabled(self):
671 response = self.response
671 response = self.response
672 response.mustcontain("var enable_fields = true;")
672 response.mustcontain("var enable_fields = true;")
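
For reference, the ComparePage helper above is driven from the tests in this module roughly as in the following sketch. It assumes the backend fixture, the test app self.app and the route_path helper used throughout this file; the commits and the change counts are placeholders for illustration, not assertions about any particular repository.

    # sketch of typical ComparePage usage; commits and counts are illustrative
    commit0 = backend.repo.get_commit(commit_idx=0)
    commit1 = backend.repo.get_commit(commit_idx=1)

    response = self.app.get(
        route_path('repo_compare',
                   repo_name=backend.repo_name,
                   source_ref_type="rev", source_ref=commit0.raw_id,
                   target_ref_type="rev", target_ref=commit1.raw_id,
                   params=dict(merge='1')))

    compare_page = ComparePage(response)
    compare_page.contains_commits([commit1])        # commit list is rendered
    compare_page.contains_change_summary(1, 2, 0)   # 1 file, 2 inserted, 0 deleted
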
@@ -1,1092 +1,1092 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 from rhodecode.lib import helpers as h
28 from rhodecode.lib import helpers as h
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.ext_json import json
30 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.vcs import nodes
31 from rhodecode.lib.vcs import nodes
32
32
33 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.lib.vcs.conf import settings
34 from rhodecode.tests import assert_session_flash
34 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests.fixture import Fixture
35 from rhodecode.tests.fixture import Fixture
36 from rhodecode.model.db import Session
36 from rhodecode.model.db import Session
37
37
38 fixture = Fixture()
38 fixture = Fixture()
39
39
40
40
41 def get_node_history(backend_type):
41 def get_node_history(backend_type):
42 return {
42 return {
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 }[backend_type]
46 }[backend_type]
47
47
48
48
49 def route_path(name, params=None, **kwargs):
49 def route_path(name, params=None, **kwargs):
50 import urllib.request, urllib.parse, urllib.error
50 import urllib.request, urllib.parse, urllib.error
51
51
52 base_url = {
52 base_url = {
53 'repo_summary': '/{repo_name}',
53 'repo_summary': '/{repo_name}',
54 'repo_archivefile': '/{repo_name}/archive/{fname}',
54 'repo_archivefile': '/{repo_name}/archive/{fname}',
55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
59 'repo_files:default_commit': '/{repo_name}/files',
59 'repo_files:default_commit': '/{repo_name}/files',
60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
76 }[name].format(**kwargs)
76 }[name].format(**kwargs)
77
77
78 if params:
78 if params:
79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
80 return base_url
80 return base_url
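
The helper above first formats the matching base_url entry with the keyword arguments and then, when params is given, appends them as a query string via urllib.parse.urlencode. A small illustration, with a made-up repository name and file path:

    # illustrative call; 'some-repo' and 'README.rst' are placeholder values
    url = route_path('repo_files',
                     repo_name='some-repo', commit_id='tip', f_path='README.rst',
                     params={'at': 'tip'})
    # url == '/some-repo/files/tip/README.rst?at=tip'
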
81
81
82
82
83 def assert_files_in_response(response, files, params):
83 def assert_files_in_response(response, files, params):
84 template = (
84 template = (
85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
86 _assert_items_in_response(response, files, template, params)
86 _assert_items_in_response(response, files, template, params)
87
87
88
88
89 def assert_dirs_in_response(response, dirs, params):
89 def assert_dirs_in_response(response, dirs, params):
90 template = (
90 template = (
91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
92 _assert_items_in_response(response, dirs, template, params)
92 _assert_items_in_response(response, dirs, template, params)
93
93
94
94
95 def _assert_items_in_response(response, items, template, params):
95 def _assert_items_in_response(response, items, template, params):
96 for item in items:
96 for item in items:
97 item_params = {'name': item}
97 item_params = {'name': item}
98 item_params.update(params)
98 item_params.update(params)
99 response.mustcontain(template % item_params)
99 response.mustcontain(template % item_params)
100
100
101
101
102 def assert_timeago_in_response(response, items, params):
102 def assert_timeago_in_response(response, items, params):
103 for item in items:
103 for item in items:
104 response.mustcontain(h.age_component(params['date']))
104 response.mustcontain(h.age_component(params['date']))
105
105
106
106
107 @pytest.mark.usefixtures("app")
107 @pytest.mark.usefixtures("app")
108 class TestFilesViews(object):
108 class TestFilesViews(object):
109
109
110 def test_show_files(self, backend):
110 def test_show_files(self, backend):
111 response = self.app.get(
111 response = self.app.get(
112 route_path('repo_files',
112 route_path('repo_files',
113 repo_name=backend.repo_name,
113 repo_name=backend.repo_name,
114 commit_id='tip', f_path='/'))
114 commit_id='tip', f_path='/'))
115 commit = backend.repo.get_commit()
115 commit = backend.repo.get_commit()
116
116
117 params = {
117 params = {
118 'repo_name': backend.repo_name,
118 'repo_name': backend.repo_name,
119 'commit_id': commit.raw_id,
119 'commit_id': commit.raw_id,
120 'date': commit.date
120 'date': commit.date
121 }
121 }
122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
123 files = [
123 files = [
124 '.gitignore',
124 '.gitignore',
125 '.hgignore',
125 '.hgignore',
126 '.hgtags',
126 '.hgtags',
127 # TODO: missing in Git
127 # TODO: missing in Git
128 # '.travis.yml',
128 # '.travis.yml',
129 'MANIFEST.in',
129 'MANIFEST.in',
130 'README.rst',
130 'README.rst',
131 # TODO: File is missing in svn repository
131 # TODO: File is missing in svn repository
132 # 'run_test_and_report.sh',
132 # 'run_test_and_report.sh',
133 'setup.cfg',
133 'setup.cfg',
134 'setup.py',
134 'setup.py',
135 'test_and_report.sh',
135 'test_and_report.sh',
136 'tox.ini',
136 'tox.ini',
137 ]
137 ]
138 assert_files_in_response(response, files, params)
138 assert_files_in_response(response, files, params)
139 assert_timeago_in_response(response, files, params)
139 assert_timeago_in_response(response, files, params)
140
140
141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
142 repo = backend_hg['subrepos']
142 repo = backend_hg['subrepos']
143 response = self.app.get(
143 response = self.app.get(
144 route_path('repo_files',
144 route_path('repo_files',
145 repo_name=repo.repo_name,
145 repo_name=repo.repo_name,
146 commit_id='tip', f_path='/'))
146 commit_id='tip', f_path='/'))
147 assert_response = response.assert_response()
147 assert_response = response.assert_response()
148 assert_response.contains_one_link(
148 assert_response.contains_one_link(
149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
150
150
151 def test_show_files_links_submodules_with_absolute_url_subpaths(
151 def test_show_files_links_submodules_with_absolute_url_subpaths(
152 self, backend_hg):
152 self, backend_hg):
153 repo = backend_hg['subrepos']
153 repo = backend_hg['subrepos']
154 response = self.app.get(
154 response = self.app.get(
155 route_path('repo_files',
155 route_path('repo_files',
156 repo_name=repo.repo_name,
156 repo_name=repo.repo_name,
157 commit_id='tip', f_path='/'))
157 commit_id='tip', f_path='/'))
158 assert_response = response.assert_response()
158 assert_response = response.assert_response()
159 assert_response.contains_one_link(
159 assert_response.contains_one_link(
160 'subpaths-path @ 000000000000',
160 'subpaths-path @ 000000000000',
161 'http://sub-base.example.com/subpaths-path')
161 'http://sub-base.example.com/subpaths-path')
162
162
163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 def test_files_menu(self, backend):
164 def test_files_menu(self, backend):
165 new_branch = "temp_branch_name"
165 new_branch = "temp_branch_name"
166 commits = [
166 commits = [
167 {'message': 'a'},
167 {'message': 'a'},
168 {'message': 'b', 'branch': new_branch}
168 {'message': 'b', 'branch': new_branch}
169 ]
169 ]
170 backend.create_repo(commits)
170 backend.create_repo(commits)
171 backend.repo.landing_rev = "branch:%s" % new_branch
171 backend.repo.landing_rev = "branch:%s" % new_branch
172 Session().commit()
172 Session().commit()
173
173
174 # get response based on tip and not new commit
174 # get response based on tip and not new commit
175 response = self.app.get(
175 response = self.app.get(
176 route_path('repo_files',
176 route_path('repo_files',
177 repo_name=backend.repo_name,
177 repo_name=backend.repo_name,
178 commit_id='tip', f_path='/'))
178 commit_id='tip', f_path='/'))
179
179
180 # make sure Files menu url is not tip but new commit
180 # make sure Files menu url is not tip but new commit
181 landing_rev = backend.repo.landing_ref_name
181 landing_rev = backend.repo.landing_ref_name
182 files_url = route_path('repo_files:default_path',
182 files_url = route_path('repo_files:default_path',
183 repo_name=backend.repo_name,
183 repo_name=backend.repo_name,
184 commit_id=landing_rev, params={'at': landing_rev})
184 commit_id=landing_rev, params={'at': landing_rev})
185
185
186 assert landing_rev != 'tip'
186 assert landing_rev != 'tip'
187 response.mustcontain(
187 response.mustcontain(
188 '<li class="active"><a class="menulink" href="%s">' % files_url)
188 '<li class="active"><a class="menulink" href="%s">' % files_url)
189
189
190 def test_show_files_commit(self, backend):
190 def test_show_files_commit(self, backend):
191 commit = backend.repo.get_commit(commit_idx=32)
191 commit = backend.repo.get_commit(commit_idx=32)
192
192
193 response = self.app.get(
193 response = self.app.get(
194 route_path('repo_files',
194 route_path('repo_files',
195 repo_name=backend.repo_name,
195 repo_name=backend.repo_name,
196 commit_id=commit.raw_id, f_path='/'))
196 commit_id=commit.raw_id, f_path='/'))
197
197
198 dirs = ['docs', 'tests']
198 dirs = ['docs', 'tests']
199 files = ['README.rst']
199 files = ['README.rst']
200 params = {
200 params = {
201 'repo_name': backend.repo_name,
201 'repo_name': backend.repo_name,
202 'commit_id': commit.raw_id,
202 'commit_id': commit.raw_id,
203 }
203 }
204 assert_dirs_in_response(response, dirs, params)
204 assert_dirs_in_response(response, dirs, params)
205 assert_files_in_response(response, files, params)
205 assert_files_in_response(response, files, params)
206
206
207 def test_show_files_different_branch(self, backend):
207 def test_show_files_different_branch(self, backend):
208 branches = dict(
208 branches = dict(
209 hg=(150, ['git']),
209 hg=(150, ['git']),
210 # TODO: Git test repository does not contain other branches
210 # TODO: Git test repository does not contain other branches
211 git=(633, ['master']),
211 git=(633, ['master']),
212 # TODO: Branch support in Subversion
212 # TODO: Branch support in Subversion
213 svn=(150, [])
213 svn=(150, [])
214 )
214 )
215 idx, branches = branches[backend.alias]
215 idx, branches = branches[backend.alias]
216 commit = backend.repo.get_commit(commit_idx=idx)
216 commit = backend.repo.get_commit(commit_idx=idx)
217 response = self.app.get(
217 response = self.app.get(
218 route_path('repo_files',
218 route_path('repo_files',
219 repo_name=backend.repo_name,
219 repo_name=backend.repo_name,
220 commit_id=commit.raw_id, f_path='/'))
220 commit_id=commit.raw_id, f_path='/'))
221
221
222 assert_response = response.assert_response()
222 assert_response = response.assert_response()
223 for branch in branches:
223 for branch in branches:
224 assert_response.element_contains('.tags .branchtag', branch)
224 assert_response.element_contains('.tags .branchtag', branch)
225
225
226 def test_show_files_paging(self, backend):
226 def test_show_files_paging(self, backend):
227 repo = backend.repo
227 repo = backend.repo
228 indexes = [73, 92, 109, 1, 0]
228 indexes = [73, 92, 109, 1, 0]
229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
230 for rev in indexes]
230 for rev in indexes]
231
231
232 for idx in idx_map:
232 for idx in idx_map:
233 response = self.app.get(
233 response = self.app.get(
234 route_path('repo_files',
234 route_path('repo_files',
235 repo_name=backend.repo_name,
235 repo_name=backend.repo_name,
236 commit_id=idx[1], f_path='/'))
236 commit_id=idx[1], f_path='/'))
237
237
238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
239
239
240 def test_file_source(self, backend):
240 def test_file_source(self, backend):
241 commit = backend.repo.get_commit(commit_idx=167)
241 commit = backend.repo.get_commit(commit_idx=167)
242 response = self.app.get(
242 response = self.app.get(
243 route_path('repo_files',
243 route_path('repo_files',
244 repo_name=backend.repo_name,
244 repo_name=backend.repo_name,
245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
246
246
247 msgbox = """<div class="commit">%s</div>"""
247 msgbox = """<div class="commit">%s</div>"""
248 response.mustcontain(msgbox % (commit.message, ))
248 response.mustcontain(msgbox % (commit.message, ))
249
249
250 assert_response = response.assert_response()
250 assert_response = response.assert_response()
251 if commit.branch:
251 if commit.branch:
252 assert_response.element_contains(
252 assert_response.element_contains(
253 '.tags.tags-main .branchtag', commit.branch)
253 '.tags.tags-main .branchtag', commit.branch)
254 if commit.tags:
254 if commit.tags:
255 for tag in commit.tags:
255 for tag in commit.tags:
256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
257
257
258 def test_file_source_annotated(self, backend):
258 def test_file_source_annotated(self, backend):
259 response = self.app.get(
259 response = self.app.get(
260 route_path('repo_files:annotated',
260 route_path('repo_files:annotated',
261 repo_name=backend.repo_name,
261 repo_name=backend.repo_name,
262 commit_id='tip', f_path='vcs/nodes.py'))
262 commit_id='tip', f_path='vcs/nodes.py'))
263 expected_commits = {
263 expected_commits = {
264 'hg': 'r356',
264 'hg': 'r356',
            'git': 'r345',
            'svn': 'r208',
        }
        response.mustcontain(expected_commits[backend.alias])

    def test_file_source_authors(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_authors_with_annotation(self, backend):
        response = self.app.get(
            route_path('repo_file_authors',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)))
        expected_authors = {
            'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
            'svn': ('marcin', 'lukasz'),
        }

        for author in expected_authors[backend.alias]:
            response.mustcontain(author)

    def test_file_source_history(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py'),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_file_source_history_svn(self, backend_svn, xhr_header):
        simple_repo = backend_svn['svn-simple-layout']
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=simple_repo.repo_name,
                       commit_id='tip', f_path='trunk/example.py'),
            extra_environ=xhr_header)

        expected_data = json.loads(
            fixture.load_resource('svn_node_history_branches.json'))

        assert expected_data == response.json

    def test_file_source_history_with_annotation(self, backend, xhr_header):
        response = self.app.get(
            route_path('repo_file_history',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='vcs/nodes.py',
                       params=dict(annotate=1)),
            extra_environ=xhr_header)
        assert get_node_history(backend.alias) == json.loads(response.body)

    def test_tree_search_top_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']

    def test_tree_search_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)

    def test_tree_search_at_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api', 'type': 'dir'} in nodes
        assert {'name': 'docs/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/docs/api'),
            extra_environ=xhr_header)
        assert 'nodes' in response.json
        nodes = response.json['nodes']
        assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes

    def test_tree_search_at_path_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_files_nodelist',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/docs'),
            status=404)

    def test_nodetree(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/'),
            extra_environ=xhr_header)

        assert_response = response.assert_response()

        for attr in ['data-commit-id', 'data-date', 'data-author']:
            elements = assert_response.get_elements('[{}]'.format(attr))
            assert len(elements) > 1

            for element in elements:
                assert element.get(attr)

    def test_nodetree_if_file(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='README.rst'),
            extra_environ=xhr_header)
        assert response.text == ''

    def test_nodetree_wrong_path(self, backend, xhr_header):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='/dont-exist'),
            extra_environ=xhr_header)

        err = 'error: There is no file nor ' \
              'directory at the given path'
        assert err in response.text

    def test_nodetree_missing_xhr(self, backend):
        self.app.get(
            route_path('repo_nodetree_full',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            status=404)


@pytest.mark.usefixtures("app", "autologin_user")
class TestRawFileHandling(object):

    def test_download_file(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
        assert response.content_type == "text/x-python"

    def test_download_file_wrong_cs(self, backend):
        raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_download_file_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'

        response = self.app.get(
            route_path('repo_file_download',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_file_raw(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path='vcs/nodes.py'),)

        assert response.content_type == "text/plain"

    def test_file_raw_binary(self, backend):
        commit = backend.repo.get_commit()
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id,
                       f_path='docs/theme/ADC/static/breadcrumb_background.png'),)

        assert response.content_disposition == 'inline'

    def test_raw_file_wrong_cs(self, backend):
        raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'

        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=raw_id, f_path='vcs/nodes.svg'),
            status=404)

        msg = """No such commit exists for this repository"""
        response.mustcontain(msg)

    def test_raw_wrong_f_path(self, backend):
        commit = backend.repo.get_commit(commit_idx=173)
        f_path = 'vcs/ERRORnodes.py'
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id=commit.raw_id, f_path=f_path),
            status=404)

        msg = (
            "There is no file nor directory at the given path: "
            "`%s` at commit %s" % (f_path, commit.short_id))
        response.mustcontain(msg)

    def test_raw_svg_should_not_be_rendered(self, backend):
        backend.create_repo()
        backend.ensure_file("xss.svg")
        response = self.app.get(
            route_path('repo_file_raw',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='xss.svg'),)
        # If the content type is image/svg+xml then it allows to render HTML
        # and malicious SVG.
        assert response.content_type == "text/plain"


@pytest.mark.usefixtures("app")
class TestRepositoryArchival(object):

    def test_archival(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = commit.short_id + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    def test_archival_no_hash(self, backend):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)
        for a_type, content_type, extension in settings.ARCHIVE_SPECS:

            short = 'plain' + extension
            fname = commit.raw_id + extension
            filename = '%s-%s' % (backend.repo_name, short)
            response = self.app.get(
                route_path('repo_archivefile',
                           repo_name=backend.repo_name,
                           fname=fname, params={'with_hash': 0}))

            assert response.status == '200 OK'
            headers = [
                ('Content-Disposition', 'attachment; filename=%s' % filename),
                ('Content-Type', '%s' % content_type),
            ]

            for header in headers:
                assert header in response.headers.items()

    @pytest.mark.parametrize('arch_ext', [
        'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
    def test_archival_wrong_ext(self, backend, arch_ext):
        backend.enable_downloads()
        commit = backend.repo.get_commit(commit_idx=173)

        fname = commit.raw_id + '.' + arch_ext

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain(
            'Unknown archive type for: `{}`'.format(fname))

    @pytest.mark.parametrize('commit_id', [
        '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
    def test_archival_wrong_commit_id(self, backend, commit_id):
        backend.enable_downloads()
        fname = '%s.zip' % commit_id

        response = self.app.get(
            route_path('repo_archivefile',
                       repo_name=backend.repo_name,
                       fname=fname))
        response.mustcontain('Unknown commit_id')


@pytest.mark.usefixtures("app")
class TestFilesDiff(object):

    @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
    def test_file_full_diff(self, backend, diff):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
                'fulldiff': '1',
                'diff': diff,
            })

        if diff == 'diff':
            # use redirect since this is OLD view redirecting to compare page
            response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_file_binary_diff(self, backend):
        commits = [
            {'message': 'First commit'},
            {'message': 'Commit with binary',
             'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
        ]
        repo = backend.create_repo(commits=commits)

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='file.bin'),
            params={
                'diff1': repo.get_commit(commit_idx=0).raw_id,
                'diff2': repo.get_commit(commit_idx=1).raw_id,
                'fulldiff': '1',
                'diff': 'diff',
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()
        response.mustcontain('Collapse 1 commit')
        file_changes = (1, 0, 0)

        compare_page = ComparePage(response)
        compare_page.contains_change_summary(*file_changes)

        if backend.alias == 'svn':
            response.mustcontain('new file 10644')
            # TODO(marcink): SVN doesn't yet detect binary changes
        else:
            response.mustcontain('new file 100644')
            response.mustcontain('binary diff hidden')

    def test_diff_2way(self, backend):
        commit1 = backend.repo.get_commit(commit_idx=-1)
        commit2 = backend.repo.get_commit(commit_idx=-2)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit2.raw_id,
                'diff2': commit1.raw_id,
            })
        # use redirect since this is OLD view redirecting to compare page
        response = response.follow()

        # It's a symlink to README.rst
        response.mustcontain('README.rst')
        response.mustcontain('No newline at end of file')

    def test_requires_one_commit_id(self, backend, autologin_user):
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend.repo_name,
                       f_path='README.rst'),
            status=400)
        response.mustcontain(
            'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')

    def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
        repo = vcsbackend.repo
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=repo.name,
                       f_path='does-not-exist-in-any-commit'),
            params={
                'diff1': repo[0].raw_id,
                'diff2': repo[1].raw_id
            })

        response = response.follow()
        response.mustcontain('No files')

    def test_returns_redirect_if_file_not_changed(self, backend):
        commit = backend.repo.get_commit(commit_idx=-1)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit.raw_id,
                'diff2': commit.raw_id,
            })

        response = response.follow()
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

    def test_supports_diff_to_different_path_svn(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id_1 = '24'
        commit_id_2 = '26'

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'tags/v0.2/example.py@' + commit_id_1,
                'diff2': commit_id_2,
            })

        response = response.follow()
        response.mustcontain(
            # diff contains this
            "Will print out a useful message on invocation.")

        # Note: Expecting that we indicate to the user what's being compared
        response.mustcontain("trunk/example.py")
        response.mustcontain("tags/v0.2/example.py")

    def test_show_rev_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/files/26/branches/argparse/example.py')

    def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        return

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
                'show_rev': 'Show at Revision',
                'annotate': 'true',
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')


@pytest.mark.usefixtures("app", "autologin_user")
class TestModifyFilesWithWebInterface(object):

    def test_add_file_view(self, backend):
        self.app.get(
            route_path('repo_files_add_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/')
        )

    @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
    def test_add_file_into_repo_missing_content(self, backend, csrf_token):
        backend.create_repo()
        filename = 'init.py'
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)
        expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename))
        assert_session_flash(response, expected_msg)

    def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
        commit_id = backend.repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

    def test_add_file_into_repo_errors_and_no_commits(
            self, backend, csrf_token):
        repo = backend.create_repo()
        # Create a file with no filename, it will display an error but
        # the repo has no commits yet
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

        # Not allowed, redirect to the summary
        redirected = response.follow()
        summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)

        # As there are no commits, displays the summary page with the error of
        # creating a file with no filename

        assert redirected.request.path == summary_url

    @pytest.mark.parametrize("filename, clean_filename", [
        ('/abs/foo', 'abs/foo'),
        ('../rel/foo', 'rel/foo'),
        ('file/../foo/foo', 'file/foo/foo'),
    ])
    def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id

        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
        assert_session_flash(response, expected_msg)

    @pytest.mark.parametrize("cnt, filename, content", [
        (1, 'foo.txt', "Content"),
        (2, 'dir/foo.rst', "Content"),
        (3, 'dir/foo-second.rst', "Content"),
        (4, 'rel/dir/foo.bar', "Content"),
    ])
    def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': content,
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(filename)
        assert_session_flash(response, expected_msg)

    def test_edit_file_view(self, backend):
        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)
        response.mustcontain("Module holding everything related to vcs nodes.")

    def test_edit_file_view_not_on_branch(self, backend):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_edit_file_view_commit_changes(self, backend, csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': 'I committed',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'I committed'

    def test_edit_file_view_commit_changes_default_message(self, backend,
                                                           csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        commit_id = (
            backend.default_branch_name or
            backend.repo.scm_instance().commit_ids[-1])

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': '',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'

    def test_delete_file_view(self, backend):
        self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)

    def test_delete_file_view_not_on_branch(self, backend):
        repo = backend.create_repo()
        backend.ensure_file('vcs/nodes.py')

        response = self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_delete_file_view_commit_changes(self, backend, csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.post(
            route_path('repo_files_delete_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'message': 'i committed',
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully deleted file `vcs/nodes.py`')


@pytest.mark.usefixtures("app")
class TestFilesViewOtherCases(object):

    def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
            self, backend_stub, autologin_regular_user, user_regular,
            user_util):

        repo = backend_stub.create_repo()
        user_util.grant_user_permission_to_repo(
            repo, user_regular, 'repository.write')
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        assert_session_flash(
            response,
            'There are no files yet. <a class="alert-link" '
            'href="{}">Click here to add a new file.</a>'
            .format(repo_file_add_url))

    def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
            self, backend_stub, autologin_regular_user):
        repo = backend_stub.create_repo()
        # init session for anon user
        route_path('repo_summary', repo_name=repo.repo_name)

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        assert_session_flash(response, no_=repo_file_add_url)

    @pytest.mark.parametrize('file_node', [
        'archive/file.zip',
        'diff/my-file.txt',
        'render.py',
        'render',
        'remove_file',
        'remove_file/to-delete.txt',
    ])
    def test_file_names_equal_to_routes_parts(self, backend, file_node):
        backend.create_repo()
        backend.ensure_file(file_node)

        self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path=file_node),
            status=200)


class TestAdjustFilePathForSvn(object):
    """
    SVN specific adjustments of node history in RepoFilesView.
    """

    def test_returns_path_relative_to_matched_reference(self):
        repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('trunk/file', 'file', repo)

    def test_does_not_modify_file_if_no_reference_matches(self):
        repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('notes/file', 'notes/file', repo)

    def test_does_not_adjust_partial_directory_names(self):
        repo = self._repo(branches=['trun'])
        self.assert_file_adjustment('trunk/file', 'trunk/file', repo)

    def test_is_robust_to_patterns_which_prefix_other_patterns(self):
        repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
        self.assert_file_adjustment('trunk/new/file', 'file', repo)

    def assert_file_adjustment(self, f_path, expected, repo):
        result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
        assert result == expected

    def _repo(self, branches=None):
        repo = mock.Mock()
        repo.branches = OrderedDict((name, '0') for name in branches or [])
        repo.tags = {}
        return repo
@@ -1,149 +1,149 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.lib.utils2 import md5
from rhodecode.model.db import Repository
from rhodecode.model.meta import Session
from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel


def route_path(name, params=None, **kwargs):
    import urllib.request, urllib.parse, urllib.error

    base_url = {
        'repo_summary': '/{repo_name}',
        'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers',
        'edit_repo_issuetracker_test': '/{repo_name}/settings/issue_trackers/test',
        'edit_repo_issuetracker_delete': '/{repo_name}/settings/issue_trackers/delete',
        'edit_repo_issuetracker_update': '/{repo_name}/settings/issue_trackers/update',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url


@pytest.mark.usefixtures("app")
class TestRepoIssueTracker(object):
    def test_issuetracker_index(self, autologin_user, backend):
        repo = backend.create_repo()
        response = self.app.get(route_path('edit_repo_issuetracker',
                                           repo_name=repo.repo_name))
        assert response.status_code == 200

    def test_add_and_test_issuetracker_patterns(
            self, autologin_user, backend, csrf_token, request, xhr_header):
        pattern = 'issuetracker_pat'
        another_pattern = pattern + '1'
        post_url = route_path(
            'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name)
        post_data = {
            'new_pattern_pattern_0': pattern,
            'new_pattern_url_0': 'http://url',
            'new_pattern_prefix_0': 'prefix',
            'new_pattern_description_0': 'description',
            'new_pattern_pattern_1': another_pattern,
            'new_pattern_url_1': '/url1',
            'new_pattern_prefix_1': 'prefix1',
            'new_pattern_description_1': 'description1',
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        self.settings_model = IssueTrackerSettingsModel(repo=backend.repo)
        settings = self.settings_model.get_repo_settings()
        self.uid = md5(pattern)
        assert settings[self.uid]['pat'] == pattern
        self.another_uid = md5(another_pattern)
        assert settings[self.another_uid]['pat'] == another_pattern

        # test pattern
        data = {'test_text': 'example of issuetracker_pat replacement',
                'csrf_token': csrf_token}
        response = self.app.post(
            route_path('edit_repo_issuetracker_test',
                       repo_name=backend.repo.repo_name),
            extra_environ=xhr_header, params=data)

        assert response.text == \
            'example of <a class="tooltip issue-tracker-link" href="http://url" title="description">prefix</a> replacement'

        @request.addfinalizer
        def cleanup():
            self.settings_model.delete_entries(self.uid)
            self.settings_model.delete_entries(self.another_uid)

    def test_edit_issuetracker_pattern(
            self, autologin_user, backend, csrf_token, request):
        entry_key = 'issuetracker_pat_'
        pattern = 'issuetracker_pat2'
        old_pattern = 'issuetracker_pat'
        old_uid = md5(old_pattern)

        sett = SettingsModel(repo=backend.repo).create_or_update_setting(
            entry_key + old_uid, old_pattern, 'unicode')
        Session().add(sett)
        Session().commit()
        post_url = route_path(
            'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name)
        post_data = {
            'new_pattern_pattern_0': pattern,
            'new_pattern_url_0': '/url',
            'new_pattern_prefix_0': 'prefix',
            'new_pattern_description_0': 'description',
            'uid': old_uid,
            'csrf_token': csrf_token
        }
        self.app.post(post_url, post_data, status=302)
        self.settings_model = IssueTrackerSettingsModel(repo=backend.repo)
        settings = self.settings_model.get_repo_settings()
        self.uid = md5(pattern)
        assert settings[self.uid]['pat'] == pattern
        with pytest.raises(KeyError):
            key = settings[old_uid]

        @request.addfinalizer
        def cleanup():
            self.settings_model.delete_entries(self.uid)

    def test_delete_issuetracker_pattern(
            self, autologin_user, backend, csrf_token, settings_util, xhr_header):
        repo = backend.create_repo()
        repo_name = repo.repo_name
        entry_key = 'issuetracker_pat_'
        pattern = 'issuetracker_pat3'
        uid = md5(pattern)
        settings_util.create_repo_rhodecode_setting(
            repo=backend.repo, name=entry_key + uid,
            value=entry_key, type_='unicode', cleanup=False)

138 self.app.post(
138 self.app.post(
139 route_path(
139 route_path(
140 'edit_repo_issuetracker_delete',
140 'edit_repo_issuetracker_delete',
141 repo_name=backend.repo.repo_name),
141 repo_name=backend.repo.repo_name),
142 {
142 {
143 'uid': uid,
143 'uid': uid,
144 'csrf_token': csrf_token,
144 'csrf_token': csrf_token,
145 '': ''
145 '': ''
146 }, extra_environ=xhr_header, status=200)
146 }, extra_environ=xhr_header, status=200)
147 settings = IssueTrackerSettingsModel(
147 settings = IssueTrackerSettingsModel(
148 repo=Repository.get_by_repo_name(repo_name)).get_repo_settings()
148 repo=Repository.get_by_repo_name(repo_name)).get_repo_settings()
149 assert 'rhodecode_%s%s' % (entry_key, uid) not in settings
149 assert 'rhodecode_%s%s' % (entry_key, uid) not in settings
@@ -1,1680 +1,1680 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib import helpers as h
24 from rhodecode.lib import helpers as h
25 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
26 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib.vcs.nodes import FileNode
27 from rhodecode.lib.ext_json import json
27 from rhodecode.lib.ext_json import json
28 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.changeset_status import ChangesetStatusModel
29 from rhodecode.model.db import (
29 from rhodecode.model.db import (
30 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
31 from rhodecode.model.meta import Session
31 from rhodecode.model.meta import Session
32 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.pull_request import PullRequestModel
33 from rhodecode.model.user import UserModel
33 from rhodecode.model.user import UserModel
34 from rhodecode.model.comment import CommentsModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.tests import (
35 from rhodecode.tests import (
36 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
36 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
37
37
38
38
39 def route_path(name, params=None, **kwargs):
39 def route_path(name, params=None, **kwargs):
40 import urllib.request, urllib.parse, urllib.error
40 import urllib.request, urllib.parse, urllib.error
41
41
42 base_url = {
42 base_url = {
43 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog': '/{repo_name}/changelog',
44 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
45 'repo_commits': '/{repo_name}/commits',
45 'repo_commits': '/{repo_name}/commits',
46 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
46 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
47 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
47 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
48 'pullrequest_show_all': '/{repo_name}/pull-request',
48 'pullrequest_show_all': '/{repo_name}/pull-request',
49 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
49 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
50 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
50 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
51 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
51 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
52 'pullrequest_new': '/{repo_name}/pull-request/new',
52 'pullrequest_new': '/{repo_name}/pull-request/new',
53 'pullrequest_create': '/{repo_name}/pull-request/create',
53 'pullrequest_create': '/{repo_name}/pull-request/create',
54 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
54 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
55 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
55 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
56 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
56 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
57 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
57 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
58 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
58 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
59 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
59 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
60 }[name].format(**kwargs)
60 }[name].format(**kwargs)
61
61
62 if params:
62 if params:
63 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
63 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
64 return base_url
64 return base_url
65
65
66
66
67 @pytest.mark.usefixtures('app', 'autologin_user')
67 @pytest.mark.usefixtures('app', 'autologin_user')
68 @pytest.mark.backends("git", "hg")
68 @pytest.mark.backends("git", "hg")
69 class TestPullrequestsView(object):
69 class TestPullrequestsView(object):
70
70
71 def test_index(self, backend):
71 def test_index(self, backend):
72 self.app.get(route_path(
72 self.app.get(route_path(
73 'pullrequest_new',
73 'pullrequest_new',
74 repo_name=backend.repo_name))
74 repo_name=backend.repo_name))
75
75
76 def test_option_menu_create_pull_request_exists(self, backend):
76 def test_option_menu_create_pull_request_exists(self, backend):
77 repo_name = backend.repo_name
77 repo_name = backend.repo_name
78 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
78 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
79
79
80 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
80 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
81 'pullrequest_new', repo_name=repo_name)
81 'pullrequest_new', repo_name=repo_name)
82 response.mustcontain(create_pr_link)
82 response.mustcontain(create_pr_link)
83
83
84 def test_create_pr_form_with_raw_commit_id(self, backend):
84 def test_create_pr_form_with_raw_commit_id(self, backend):
85 repo = backend.repo
85 repo = backend.repo
86
86
87 self.app.get(
87 self.app.get(
88 route_path('pullrequest_new', repo_name=repo.repo_name,
88 route_path('pullrequest_new', repo_name=repo.repo_name,
89 commit=repo.get_commit().raw_id),
89 commit=repo.get_commit().raw_id),
90 status=200)
90 status=200)
91
91
92 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
92 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
93 @pytest.mark.parametrize('range_diff', ["0", "1"])
93 @pytest.mark.parametrize('range_diff', ["0", "1"])
94 def test_show(self, pr_util, pr_merge_enabled, range_diff):
94 def test_show(self, pr_util, pr_merge_enabled, range_diff):
95 pull_request = pr_util.create_pull_request(
95 pull_request = pr_util.create_pull_request(
96 mergeable=pr_merge_enabled, enable_notifications=False)
96 mergeable=pr_merge_enabled, enable_notifications=False)
97
97
98 response = self.app.get(route_path(
98 response = self.app.get(route_path(
99 'pullrequest_show',
99 'pullrequest_show',
100 repo_name=pull_request.target_repo.scm_instance().name,
100 repo_name=pull_request.target_repo.scm_instance().name,
101 pull_request_id=pull_request.pull_request_id,
101 pull_request_id=pull_request.pull_request_id,
102 params={'range-diff': range_diff}))
102 params={'range-diff': range_diff}))
103
103
104 for commit_id in pull_request.revisions:
104 for commit_id in pull_request.revisions:
105 response.mustcontain(commit_id)
105 response.mustcontain(commit_id)
106
106
107 response.mustcontain(pull_request.target_ref_parts.type)
107 response.mustcontain(pull_request.target_ref_parts.type)
108 response.mustcontain(pull_request.target_ref_parts.name)
108 response.mustcontain(pull_request.target_ref_parts.name)
109
109
110 response.mustcontain('class="pull-request-merge"')
110 response.mustcontain('class="pull-request-merge"')
111
111
112 if pr_merge_enabled:
112 if pr_merge_enabled:
113 response.mustcontain('Pull request reviewer approval is pending')
113 response.mustcontain('Pull request reviewer approval is pending')
114 else:
114 else:
115 response.mustcontain('Server-side pull request merging is disabled.')
115 response.mustcontain('Server-side pull request merging is disabled.')
116
116
117 if range_diff == "1":
117 if range_diff == "1":
118 response.mustcontain('Turn off: Show the diff as commit range')
118 response.mustcontain('Turn off: Show the diff as commit range')
119
119
120 def test_show_versions_of_pr(self, backend, csrf_token):
120 def test_show_versions_of_pr(self, backend, csrf_token):
121 commits = [
121 commits = [
122 {'message': 'initial-commit',
122 {'message': 'initial-commit',
123 'added': [FileNode('test-file.txt', 'LINE1\n')]},
123 'added': [FileNode('test-file.txt', 'LINE1\n')]},
124
124
125 {'message': 'commit-1',
125 {'message': 'commit-1',
126 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
126 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
127         # Above is the initial version of the PR, which changes a single line
127         # Above is the initial version of the PR, which changes a single line
128
128
129         # from now on we'll add 3 more commits, each adding another line
129         # from now on we'll add 3 more commits, each adding another line
130 {'message': 'commit-2',
130 {'message': 'commit-2',
131 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
131 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
132
132
133 {'message': 'commit-3',
133 {'message': 'commit-3',
134 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
134 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
135
135
136 {'message': 'commit-4',
136 {'message': 'commit-4',
137 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
137 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
138 ]
138 ]
139
139
140 commit_ids = backend.create_master_repo(commits)
140 commit_ids = backend.create_master_repo(commits)
141 target = backend.create_repo(heads=['initial-commit'])
141 target = backend.create_repo(heads=['initial-commit'])
142 source = backend.create_repo(heads=['commit-1'])
142 source = backend.create_repo(heads=['commit-1'])
143 source_repo_name = source.repo_name
143 source_repo_name = source.repo_name
144 target_repo_name = target.repo_name
144 target_repo_name = target.repo_name
145
145
146 target_ref = 'branch:{branch}:{commit_id}'.format(
146 target_ref = 'branch:{branch}:{commit_id}'.format(
147 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
147 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
148 source_ref = 'branch:{branch}:{commit_id}'.format(
148 source_ref = 'branch:{branch}:{commit_id}'.format(
149 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
149 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
150
150
151 response = self.app.post(
151 response = self.app.post(
152 route_path('pullrequest_create', repo_name=source.repo_name),
152 route_path('pullrequest_create', repo_name=source.repo_name),
153 [
153 [
154 ('source_repo', source_repo_name),
154 ('source_repo', source_repo_name),
155 ('source_ref', source_ref),
155 ('source_ref', source_ref),
156 ('target_repo', target_repo_name),
156 ('target_repo', target_repo_name),
157 ('target_ref', target_ref),
157 ('target_ref', target_ref),
158 ('common_ancestor', commit_ids['initial-commit']),
158 ('common_ancestor', commit_ids['initial-commit']),
159 ('pullrequest_title', 'Title'),
159 ('pullrequest_title', 'Title'),
160 ('pullrequest_desc', 'Description'),
160 ('pullrequest_desc', 'Description'),
161 ('description_renderer', 'markdown'),
161 ('description_renderer', 'markdown'),
162 ('__start__', 'review_members:sequence'),
162 ('__start__', 'review_members:sequence'),
163 ('__start__', 'reviewer:mapping'),
163 ('__start__', 'reviewer:mapping'),
164 ('user_id', '1'),
164 ('user_id', '1'),
165 ('__start__', 'reasons:sequence'),
165 ('__start__', 'reasons:sequence'),
166 ('reason', 'Some reason'),
166 ('reason', 'Some reason'),
167 ('__end__', 'reasons:sequence'),
167 ('__end__', 'reasons:sequence'),
168 ('__start__', 'rules:sequence'),
168 ('__start__', 'rules:sequence'),
169 ('__end__', 'rules:sequence'),
169 ('__end__', 'rules:sequence'),
170 ('mandatory', 'False'),
170 ('mandatory', 'False'),
171 ('__end__', 'reviewer:mapping'),
171 ('__end__', 'reviewer:mapping'),
172 ('__end__', 'review_members:sequence'),
172 ('__end__', 'review_members:sequence'),
173 ('__start__', 'revisions:sequence'),
173 ('__start__', 'revisions:sequence'),
174 ('revisions', commit_ids['commit-1']),
174 ('revisions', commit_ids['commit-1']),
175 ('__end__', 'revisions:sequence'),
175 ('__end__', 'revisions:sequence'),
176 ('user', ''),
176 ('user', ''),
177 ('csrf_token', csrf_token),
177 ('csrf_token', csrf_token),
178 ],
178 ],
179 status=302)
179 status=302)
180
180
181 location = response.headers['Location']
181 location = response.headers['Location']
182
182
183 pull_request_id = location.rsplit('/', 1)[1]
183 pull_request_id = location.rsplit('/', 1)[1]
184 assert pull_request_id != 'new'
184 assert pull_request_id != 'new'
185 pull_request = PullRequest.get(int(pull_request_id))
185 pull_request = PullRequest.get(int(pull_request_id))
186
186
187 pull_request_id = pull_request.pull_request_id
187 pull_request_id = pull_request.pull_request_id
188
188
189 # Show initial version of PR
189 # Show initial version of PR
190 response = self.app.get(
190 response = self.app.get(
191 route_path('pullrequest_show',
191 route_path('pullrequest_show',
192 repo_name=target_repo_name,
192 repo_name=target_repo_name,
193 pull_request_id=pull_request_id))
193 pull_request_id=pull_request_id))
194
194
195 response.mustcontain('commit-1')
195 response.mustcontain('commit-1')
196 response.mustcontain(no=['commit-2'])
196 response.mustcontain(no=['commit-2'])
197 response.mustcontain(no=['commit-3'])
197 response.mustcontain(no=['commit-3'])
198 response.mustcontain(no=['commit-4'])
198 response.mustcontain(no=['commit-4'])
199
199
200 response.mustcontain('cb-addition"></span><span>LINE2</span>')
200 response.mustcontain('cb-addition"></span><span>LINE2</span>')
201 response.mustcontain(no=['LINE3'])
201 response.mustcontain(no=['LINE3'])
202 response.mustcontain(no=['LINE4'])
202 response.mustcontain(no=['LINE4'])
203 response.mustcontain(no=['LINE5'])
203 response.mustcontain(no=['LINE5'])
204
204
205 # update PR #1
205 # update PR #1
206 source_repo = Repository.get_by_repo_name(source_repo_name)
206 source_repo = Repository.get_by_repo_name(source_repo_name)
207 backend.pull_heads(source_repo, heads=['commit-2'])
207 backend.pull_heads(source_repo, heads=['commit-2'])
208 response = self.app.post(
208 response = self.app.post(
209 route_path('pullrequest_update',
209 route_path('pullrequest_update',
210 repo_name=target_repo_name, pull_request_id=pull_request_id),
210 repo_name=target_repo_name, pull_request_id=pull_request_id),
211 params={'update_commits': 'true', 'csrf_token': csrf_token})
211 params={'update_commits': 'true', 'csrf_token': csrf_token})
212
212
213 # update PR #2
213 # update PR #2
214 source_repo = Repository.get_by_repo_name(source_repo_name)
214 source_repo = Repository.get_by_repo_name(source_repo_name)
215 backend.pull_heads(source_repo, heads=['commit-3'])
215 backend.pull_heads(source_repo, heads=['commit-3'])
216 response = self.app.post(
216 response = self.app.post(
217 route_path('pullrequest_update',
217 route_path('pullrequest_update',
218 repo_name=target_repo_name, pull_request_id=pull_request_id),
218 repo_name=target_repo_name, pull_request_id=pull_request_id),
219 params={'update_commits': 'true', 'csrf_token': csrf_token})
219 params={'update_commits': 'true', 'csrf_token': csrf_token})
220
220
221 # update PR #3
221 # update PR #3
222 source_repo = Repository.get_by_repo_name(source_repo_name)
222 source_repo = Repository.get_by_repo_name(source_repo_name)
223 backend.pull_heads(source_repo, heads=['commit-4'])
223 backend.pull_heads(source_repo, heads=['commit-4'])
224 response = self.app.post(
224 response = self.app.post(
225 route_path('pullrequest_update',
225 route_path('pullrequest_update',
226 repo_name=target_repo_name, pull_request_id=pull_request_id),
226 repo_name=target_repo_name, pull_request_id=pull_request_id),
227 params={'update_commits': 'true', 'csrf_token': csrf_token})
227 params={'update_commits': 'true', 'csrf_token': csrf_token})
228
228
229         # Show final version!
229         # Show final version!
230 response = self.app.get(
230 response = self.app.get(
231 route_path('pullrequest_show',
231 route_path('pullrequest_show',
232 repo_name=target_repo_name,
232 repo_name=target_repo_name,
233 pull_request_id=pull_request_id))
233 pull_request_id=pull_request_id))
234
234
235 # 3 updates, and the latest == 4
235 # 3 updates, and the latest == 4
236 response.mustcontain('4 versions available for this pull request')
236 response.mustcontain('4 versions available for this pull request')
237 response.mustcontain(no=['rhodecode diff rendering error'])
237 response.mustcontain(no=['rhodecode diff rendering error'])
238
238
239         # the final version must show all 4 commits, and 4 added lines
239         # the final version must show all 4 commits, and 4 added lines
240 response.mustcontain('commit-1')
240 response.mustcontain('commit-1')
241 response.mustcontain('commit-2')
241 response.mustcontain('commit-2')
242 response.mustcontain('commit-3')
242 response.mustcontain('commit-3')
243 response.mustcontain('commit-4')
243 response.mustcontain('commit-4')
244
244
245 response.mustcontain('cb-addition"></span><span>LINE2</span>')
245 response.mustcontain('cb-addition"></span><span>LINE2</span>')
246 response.mustcontain('cb-addition"></span><span>LINE3</span>')
246 response.mustcontain('cb-addition"></span><span>LINE3</span>')
247 response.mustcontain('cb-addition"></span><span>LINE4</span>')
247 response.mustcontain('cb-addition"></span><span>LINE4</span>')
248 response.mustcontain('cb-addition"></span><span>LINE5</span>')
248 response.mustcontain('cb-addition"></span><span>LINE5</span>')
249
249
250 # fetch versions
250 # fetch versions
251 pr = PullRequest.get(pull_request_id)
251 pr = PullRequest.get(pull_request_id)
252 versions = [x.pull_request_version_id for x in pr.versions.all()]
252 versions = [x.pull_request_version_id for x in pr.versions.all()]
253 assert len(versions) == 3
253 assert len(versions) == 3
254
254
255 # show v1,v2,v3,v4
255 # show v1,v2,v3,v4
256 def cb_line(text):
256 def cb_line(text):
257 return 'cb-addition"></span><span>{}</span>'.format(text)
257 return 'cb-addition"></span><span>{}</span>'.format(text)
258
258
259 def cb_context(text):
259 def cb_context(text):
260 return '<span class="cb-code"><span class="cb-action cb-context">' \
260 return '<span class="cb-code"><span class="cb-action cb-context">' \
261 '</span><span>{}</span></span>'.format(text)
261 '</span><span>{}</span></span>'.format(text)
262
262
263 commit_tests = {
263 commit_tests = {
264             # (expected in response, not expected in response)
264             # (expected in response, not expected in response)
265 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
265 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
266 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
266 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
267 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
267 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
268 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
268 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
269 }
269 }
270 diff_tests = {
270 diff_tests = {
271 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
271 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
272 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
272 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
273 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
273 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
274 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
274 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
275 }
275 }
276 for idx, ver in enumerate(versions, 1):
276 for idx, ver in enumerate(versions, 1):
277
277
278 response = self.app.get(
278 response = self.app.get(
279 route_path('pullrequest_show',
279 route_path('pullrequest_show',
280 repo_name=target_repo_name,
280 repo_name=target_repo_name,
281 pull_request_id=pull_request_id,
281 pull_request_id=pull_request_id,
282 params={'version': ver}))
282 params={'version': ver}))
283
283
284 response.mustcontain(no=['rhodecode diff rendering error'])
284 response.mustcontain(no=['rhodecode diff rendering error'])
285 response.mustcontain('Showing changes at v{}'.format(idx))
285 response.mustcontain('Showing changes at v{}'.format(idx))
286
286
287 yes, no = commit_tests[idx]
287 yes, no = commit_tests[idx]
288 for y in yes:
288 for y in yes:
289 response.mustcontain(y)
289 response.mustcontain(y)
290 for n in no:
290 for n in no:
291 response.mustcontain(no=n)
291 response.mustcontain(no=n)
292
292
293 yes, no = diff_tests[idx]
293 yes, no = diff_tests[idx]
294 for y in yes:
294 for y in yes:
295 response.mustcontain(cb_line(y))
295 response.mustcontain(cb_line(y))
296 for n in no:
296 for n in no:
297 response.mustcontain(no=n)
297 response.mustcontain(no=n)
298
298
299 # show diff between versions
299 # show diff between versions
300 diff_compare_tests = {
300 diff_compare_tests = {
301 1: (['LINE3'], ['LINE1', 'LINE2']),
301 1: (['LINE3'], ['LINE1', 'LINE2']),
302 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
302 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
303 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
303 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
304 }
304 }
305 for idx, ver in enumerate(versions, 1):
305 for idx, ver in enumerate(versions, 1):
306 adds, context = diff_compare_tests[idx]
306 adds, context = diff_compare_tests[idx]
307
307
308 to_ver = ver+1
308 to_ver = ver+1
309 if idx == 3:
309 if idx == 3:
310 to_ver = 'latest'
310 to_ver = 'latest'
311
311
312 response = self.app.get(
312 response = self.app.get(
313 route_path('pullrequest_show',
313 route_path('pullrequest_show',
314 repo_name=target_repo_name,
314 repo_name=target_repo_name,
315 pull_request_id=pull_request_id,
315 pull_request_id=pull_request_id,
316 params={'from_version': versions[0], 'version': to_ver}))
316 params={'from_version': versions[0], 'version': to_ver}))
317
317
318 response.mustcontain(no=['rhodecode diff rendering error'])
318 response.mustcontain(no=['rhodecode diff rendering error'])
319
319
320 for a in adds:
320 for a in adds:
321 response.mustcontain(cb_line(a))
321 response.mustcontain(cb_line(a))
322 for c in context:
322 for c in context:
323 response.mustcontain(cb_context(c))
323 response.mustcontain(cb_context(c))
324
324
325 # test version v2 -> v3
325 # test version v2 -> v3
326 response = self.app.get(
326 response = self.app.get(
327 route_path('pullrequest_show',
327 route_path('pullrequest_show',
328 repo_name=target_repo_name,
328 repo_name=target_repo_name,
329 pull_request_id=pull_request_id,
329 pull_request_id=pull_request_id,
330 params={'from_version': versions[1], 'version': versions[2]}))
330 params={'from_version': versions[1], 'version': versions[2]}))
331
331
332 response.mustcontain(cb_context('LINE1'))
332 response.mustcontain(cb_context('LINE1'))
333 response.mustcontain(cb_context('LINE2'))
333 response.mustcontain(cb_context('LINE2'))
334 response.mustcontain(cb_context('LINE3'))
334 response.mustcontain(cb_context('LINE3'))
335 response.mustcontain(cb_line('LINE4'))
335 response.mustcontain(cb_line('LINE4'))
336
336
337 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
337 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
338 # Logout
338 # Logout
339 response = self.app.post(
339 response = self.app.post(
340 h.route_path('logout'),
340 h.route_path('logout'),
341 params={'csrf_token': csrf_token})
341 params={'csrf_token': csrf_token})
342 # Login as regular user
342 # Login as regular user
343 response = self.app.post(h.route_path('login'),
343 response = self.app.post(h.route_path('login'),
344 {'username': TEST_USER_REGULAR_LOGIN,
344 {'username': TEST_USER_REGULAR_LOGIN,
345 'password': 'test12'})
345 'password': 'test12'})
346
346
347 pull_request = pr_util.create_pull_request(
347 pull_request = pr_util.create_pull_request(
348 author=TEST_USER_REGULAR_LOGIN)
348 author=TEST_USER_REGULAR_LOGIN)
349
349
350 response = self.app.get(route_path(
350 response = self.app.get(route_path(
351 'pullrequest_show',
351 'pullrequest_show',
352 repo_name=pull_request.target_repo.scm_instance().name,
352 repo_name=pull_request.target_repo.scm_instance().name,
353 pull_request_id=pull_request.pull_request_id))
353 pull_request_id=pull_request.pull_request_id))
354
354
355 response.mustcontain('Server-side pull request merging is disabled.')
355 response.mustcontain('Server-side pull request merging is disabled.')
356
356
357 assert_response = response.assert_response()
357 assert_response = response.assert_response()
358         # for a regular user without merge permissions, we don't see it
358         # for a regular user without merge permissions, we don't see it
359 assert_response.no_element_exists('#close-pull-request-action')
359 assert_response.no_element_exists('#close-pull-request-action')
360
360
361 user_util.grant_user_permission_to_repo(
361 user_util.grant_user_permission_to_repo(
362 pull_request.target_repo,
362 pull_request.target_repo,
363 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
363 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
364 'repository.write')
364 'repository.write')
365 response = self.app.get(route_path(
365 response = self.app.get(route_path(
366 'pullrequest_show',
366 'pullrequest_show',
367 repo_name=pull_request.target_repo.scm_instance().name,
367 repo_name=pull_request.target_repo.scm_instance().name,
368 pull_request_id=pull_request.pull_request_id))
368 pull_request_id=pull_request.pull_request_id))
369
369
370 response.mustcontain('Server-side pull request merging is disabled.')
370 response.mustcontain('Server-side pull request merging is disabled.')
371
371
372 assert_response = response.assert_response()
372 assert_response = response.assert_response()
373         # now the regular user has merge permissions, we see the CLOSE button
373         # now the regular user has merge permissions, we see the CLOSE button
374 assert_response.one_element_exists('#close-pull-request-action')
374 assert_response.one_element_exists('#close-pull-request-action')
375
375
376 def test_show_invalid_commit_id(self, pr_util):
376 def test_show_invalid_commit_id(self, pr_util):
377 # Simulating invalid revisions which will cause a lookup error
377 # Simulating invalid revisions which will cause a lookup error
378 pull_request = pr_util.create_pull_request()
378 pull_request = pr_util.create_pull_request()
379 pull_request.revisions = ['invalid']
379 pull_request.revisions = ['invalid']
380 Session().add(pull_request)
380 Session().add(pull_request)
381 Session().commit()
381 Session().commit()
382
382
383 response = self.app.get(route_path(
383 response = self.app.get(route_path(
384 'pullrequest_show',
384 'pullrequest_show',
385 repo_name=pull_request.target_repo.scm_instance().name,
385 repo_name=pull_request.target_repo.scm_instance().name,
386 pull_request_id=pull_request.pull_request_id))
386 pull_request_id=pull_request.pull_request_id))
387
387
388 for commit_id in pull_request.revisions:
388 for commit_id in pull_request.revisions:
389 response.mustcontain(commit_id)
389 response.mustcontain(commit_id)
390
390
391 def test_show_invalid_source_reference(self, pr_util):
391 def test_show_invalid_source_reference(self, pr_util):
392 pull_request = pr_util.create_pull_request()
392 pull_request = pr_util.create_pull_request()
393 pull_request.source_ref = 'branch:b:invalid'
393 pull_request.source_ref = 'branch:b:invalid'
394 Session().add(pull_request)
394 Session().add(pull_request)
395 Session().commit()
395 Session().commit()
396
396
397 self.app.get(route_path(
397 self.app.get(route_path(
398 'pullrequest_show',
398 'pullrequest_show',
399 repo_name=pull_request.target_repo.scm_instance().name,
399 repo_name=pull_request.target_repo.scm_instance().name,
400 pull_request_id=pull_request.pull_request_id))
400 pull_request_id=pull_request.pull_request_id))
401
401
402 def test_edit_title_description(self, pr_util, csrf_token):
402 def test_edit_title_description(self, pr_util, csrf_token):
403 pull_request = pr_util.create_pull_request()
403 pull_request = pr_util.create_pull_request()
404 pull_request_id = pull_request.pull_request_id
404 pull_request_id = pull_request.pull_request_id
405
405
406 response = self.app.post(
406 response = self.app.post(
407 route_path('pullrequest_update',
407 route_path('pullrequest_update',
408 repo_name=pull_request.target_repo.repo_name,
408 repo_name=pull_request.target_repo.repo_name,
409 pull_request_id=pull_request_id),
409 pull_request_id=pull_request_id),
410 params={
410 params={
411 'edit_pull_request': 'true',
411 'edit_pull_request': 'true',
412 'title': 'New title',
412 'title': 'New title',
413 'description': 'New description',
413 'description': 'New description',
414 'csrf_token': csrf_token})
414 'csrf_token': csrf_token})
415
415
416 assert_session_flash(
416 assert_session_flash(
417 response, u'Pull request title & description updated.',
417 response, u'Pull request title & description updated.',
418 category='success')
418 category='success')
419
419
420 pull_request = PullRequest.get(pull_request_id)
420 pull_request = PullRequest.get(pull_request_id)
421 assert pull_request.title == 'New title'
421 assert pull_request.title == 'New title'
422 assert pull_request.description == 'New description'
422 assert pull_request.description == 'New description'
423
423
424 def test_edit_title_description(self, pr_util, csrf_token):
424 def test_edit_title_description(self, pr_util, csrf_token):
425 pull_request = pr_util.create_pull_request()
425 pull_request = pr_util.create_pull_request()
426 pull_request_id = pull_request.pull_request_id
426 pull_request_id = pull_request.pull_request_id
427
427
428 response = self.app.post(
428 response = self.app.post(
429 route_path('pullrequest_update',
429 route_path('pullrequest_update',
430 repo_name=pull_request.target_repo.repo_name,
430 repo_name=pull_request.target_repo.repo_name,
431 pull_request_id=pull_request_id),
431 pull_request_id=pull_request_id),
432 params={
432 params={
433 'edit_pull_request': 'true',
433 'edit_pull_request': 'true',
434 'title': 'New title {} {2} {foo}',
434 'title': 'New title {} {2} {foo}',
435 'description': 'New description',
435 'description': 'New description',
436 'csrf_token': csrf_token})
436 'csrf_token': csrf_token})
437
437
438 assert_session_flash(
438 assert_session_flash(
439 response, u'Pull request title & description updated.',
439 response, u'Pull request title & description updated.',
440 category='success')
440 category='success')
441
441
442 pull_request = PullRequest.get(pull_request_id)
442 pull_request = PullRequest.get(pull_request_id)
443 assert pull_request.title_safe == 'New title {{}} {{2}} {{foo}}'
443 assert pull_request.title_safe == 'New title {{}} {{2}} {{foo}}'
444
444
445 def test_edit_title_description_closed(self, pr_util, csrf_token):
445 def test_edit_title_description_closed(self, pr_util, csrf_token):
446 pull_request = pr_util.create_pull_request()
446 pull_request = pr_util.create_pull_request()
447 pull_request_id = pull_request.pull_request_id
447 pull_request_id = pull_request.pull_request_id
448 repo_name = pull_request.target_repo.repo_name
448 repo_name = pull_request.target_repo.repo_name
449 pr_util.close()
449 pr_util.close()
450
450
451 response = self.app.post(
451 response = self.app.post(
452 route_path('pullrequest_update',
452 route_path('pullrequest_update',
453 repo_name=repo_name, pull_request_id=pull_request_id),
453 repo_name=repo_name, pull_request_id=pull_request_id),
454 params={
454 params={
455 'edit_pull_request': 'true',
455 'edit_pull_request': 'true',
456 'title': 'New title',
456 'title': 'New title',
457 'description': 'New description',
457 'description': 'New description',
458 'csrf_token': csrf_token}, status=200)
458 'csrf_token': csrf_token}, status=200)
459 assert_session_flash(
459 assert_session_flash(
460 response, u'Cannot update closed pull requests.',
460 response, u'Cannot update closed pull requests.',
461 category='error')
461 category='error')
462
462
463 def test_update_invalid_source_reference(self, pr_util, csrf_token):
463 def test_update_invalid_source_reference(self, pr_util, csrf_token):
464 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
464 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
465
465
466 pull_request = pr_util.create_pull_request()
466 pull_request = pr_util.create_pull_request()
467 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
467 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
468 Session().add(pull_request)
468 Session().add(pull_request)
469 Session().commit()
469 Session().commit()
470
470
471 pull_request_id = pull_request.pull_request_id
471 pull_request_id = pull_request.pull_request_id
472
472
473 response = self.app.post(
473 response = self.app.post(
474 route_path('pullrequest_update',
474 route_path('pullrequest_update',
475 repo_name=pull_request.target_repo.repo_name,
475 repo_name=pull_request.target_repo.repo_name,
476 pull_request_id=pull_request_id),
476 pull_request_id=pull_request_id),
477 params={'update_commits': 'true', 'csrf_token': csrf_token})
477 params={'update_commits': 'true', 'csrf_token': csrf_token})
478
478
479 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
479 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
480 UpdateFailureReason.MISSING_SOURCE_REF])
480 UpdateFailureReason.MISSING_SOURCE_REF])
481 assert_session_flash(response, expected_msg, category='error')
481 assert_session_flash(response, expected_msg, category='error')
482
482
483 def test_missing_target_reference(self, pr_util, csrf_token):
483 def test_missing_target_reference(self, pr_util, csrf_token):
484 from rhodecode.lib.vcs.backends.base import MergeFailureReason
484 from rhodecode.lib.vcs.backends.base import MergeFailureReason
485 pull_request = pr_util.create_pull_request(
485 pull_request = pr_util.create_pull_request(
486 approved=True, mergeable=True)
486 approved=True, mergeable=True)
487 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
487 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
488 pull_request.target_ref = unicode_reference
488 pull_request.target_ref = unicode_reference
489 Session().add(pull_request)
489 Session().add(pull_request)
490 Session().commit()
490 Session().commit()
491
491
492 pull_request_id = pull_request.pull_request_id
492 pull_request_id = pull_request.pull_request_id
493 pull_request_url = route_path(
493 pull_request_url = route_path(
494 'pullrequest_show',
494 'pullrequest_show',
495 repo_name=pull_request.target_repo.repo_name,
495 repo_name=pull_request.target_repo.repo_name,
496 pull_request_id=pull_request_id)
496 pull_request_id=pull_request_id)
497
497
498 response = self.app.get(pull_request_url)
498 response = self.app.get(pull_request_url)
499 target_ref_id = 'invalid-branch'
499 target_ref_id = 'invalid-branch'
500 merge_resp = MergeResponse(
500 merge_resp = MergeResponse(
501 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
501 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
502 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
502 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
503 response.assert_response().element_contains(
503 response.assert_response().element_contains(
504 'div[data-role="merge-message"]', merge_resp.merge_status_message)
504 'div[data-role="merge-message"]', merge_resp.merge_status_message)
505
505
506 def test_comment_and_close_pull_request_custom_message_approved(
506 def test_comment_and_close_pull_request_custom_message_approved(
507 self, pr_util, csrf_token, xhr_header):
507 self, pr_util, csrf_token, xhr_header):
508
508
509 pull_request = pr_util.create_pull_request(approved=True)
509 pull_request = pr_util.create_pull_request(approved=True)
510 pull_request_id = pull_request.pull_request_id
510 pull_request_id = pull_request.pull_request_id
511 author = pull_request.user_id
511 author = pull_request.user_id
512 repo = pull_request.target_repo.repo_id
512 repo = pull_request.target_repo.repo_id
513
513
514 self.app.post(
514 self.app.post(
515 route_path('pullrequest_comment_create',
515 route_path('pullrequest_comment_create',
516 repo_name=pull_request.target_repo.scm_instance().name,
516 repo_name=pull_request.target_repo.scm_instance().name,
517 pull_request_id=pull_request_id),
517 pull_request_id=pull_request_id),
518 params={
518 params={
519 'close_pull_request': '1',
519 'close_pull_request': '1',
520 'text': 'Closing a PR',
520 'text': 'Closing a PR',
521 'csrf_token': csrf_token},
521 'csrf_token': csrf_token},
522 extra_environ=xhr_header,)
522 extra_environ=xhr_header,)
523
523
524 journal = UserLog.query()\
524 journal = UserLog.query()\
525 .filter(UserLog.user_id == author)\
525 .filter(UserLog.user_id == author)\
526 .filter(UserLog.repository_id == repo) \
526 .filter(UserLog.repository_id == repo) \
527 .order_by(UserLog.user_log_id.asc()) \
527 .order_by(UserLog.user_log_id.asc()) \
528 .all()
528 .all()
529 assert journal[-1].action == 'repo.pull_request.close'
529 assert journal[-1].action == 'repo.pull_request.close'
530
530
531 pull_request = PullRequest.get(pull_request_id)
531 pull_request = PullRequest.get(pull_request_id)
532 assert pull_request.is_closed()
532 assert pull_request.is_closed()
533
533
534 status = ChangesetStatusModel().get_status(
534 status = ChangesetStatusModel().get_status(
535 pull_request.source_repo, pull_request=pull_request)
535 pull_request.source_repo, pull_request=pull_request)
536 assert status == ChangesetStatus.STATUS_APPROVED
536 assert status == ChangesetStatus.STATUS_APPROVED
537 comments = ChangesetComment().query() \
537 comments = ChangesetComment().query() \
538 .filter(ChangesetComment.pull_request == pull_request) \
538 .filter(ChangesetComment.pull_request == pull_request) \
539 .order_by(ChangesetComment.comment_id.asc())\
539 .order_by(ChangesetComment.comment_id.asc())\
540 .all()
540 .all()
541 assert comments[-1].text == 'Closing a PR'
541 assert comments[-1].text == 'Closing a PR'
542
542
543 def test_comment_force_close_pull_request_rejected(
543 def test_comment_force_close_pull_request_rejected(
544 self, pr_util, csrf_token, xhr_header):
544 self, pr_util, csrf_token, xhr_header):
545 pull_request = pr_util.create_pull_request()
545 pull_request = pr_util.create_pull_request()
546 pull_request_id = pull_request.pull_request_id
546 pull_request_id = pull_request.pull_request_id
547 PullRequestModel().update_reviewers(
547 PullRequestModel().update_reviewers(
548 pull_request_id, [
548 pull_request_id, [
549 (1, ['reason'], False, 'reviewer', []),
549 (1, ['reason'], False, 'reviewer', []),
550 (2, ['reason2'], False, 'reviewer', [])],
550 (2, ['reason2'], False, 'reviewer', [])],
551 pull_request.author)
551 pull_request.author)
552 author = pull_request.user_id
552 author = pull_request.user_id
553 repo = pull_request.target_repo.repo_id
553 repo = pull_request.target_repo.repo_id
554
554
555 self.app.post(
555 self.app.post(
556 route_path('pullrequest_comment_create',
556 route_path('pullrequest_comment_create',
557 repo_name=pull_request.target_repo.scm_instance().name,
557 repo_name=pull_request.target_repo.scm_instance().name,
558 pull_request_id=pull_request_id),
558 pull_request_id=pull_request_id),
559 params={
559 params={
560 'close_pull_request': '1',
560 'close_pull_request': '1',
561 'csrf_token': csrf_token},
561 'csrf_token': csrf_token},
562 extra_environ=xhr_header)
562 extra_environ=xhr_header)
563
563
564 pull_request = PullRequest.get(pull_request_id)
564 pull_request = PullRequest.get(pull_request_id)
565
565
566 journal = UserLog.query()\
566 journal = UserLog.query()\
567 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
567 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
568 .order_by(UserLog.user_log_id.asc()) \
568 .order_by(UserLog.user_log_id.asc()) \
569 .all()
569 .all()
570 assert journal[-1].action == 'repo.pull_request.close'
570 assert journal[-1].action == 'repo.pull_request.close'
571
571
572 # check only the latest status, not the review status
572 # check only the latest status, not the review status
573 status = ChangesetStatusModel().get_status(
573 status = ChangesetStatusModel().get_status(
574 pull_request.source_repo, pull_request=pull_request)
574 pull_request.source_repo, pull_request=pull_request)
575 assert status == ChangesetStatus.STATUS_REJECTED
575 assert status == ChangesetStatus.STATUS_REJECTED
576
576
577 def test_comment_and_close_pull_request(
577 def test_comment_and_close_pull_request(
578 self, pr_util, csrf_token, xhr_header):
578 self, pr_util, csrf_token, xhr_header):
579 pull_request = pr_util.create_pull_request()
579 pull_request = pr_util.create_pull_request()
580 pull_request_id = pull_request.pull_request_id
580 pull_request_id = pull_request.pull_request_id
581
581
582 response = self.app.post(
582 response = self.app.post(
583 route_path('pullrequest_comment_create',
583 route_path('pullrequest_comment_create',
584 repo_name=pull_request.target_repo.scm_instance().name,
584 repo_name=pull_request.target_repo.scm_instance().name,
585 pull_request_id=pull_request.pull_request_id),
585 pull_request_id=pull_request.pull_request_id),
586 params={
586 params={
587 'close_pull_request': 'true',
587 'close_pull_request': 'true',
588 'csrf_token': csrf_token},
588 'csrf_token': csrf_token},
589 extra_environ=xhr_header)
589 extra_environ=xhr_header)
590
590
591 assert response.json
591 assert response.json
592
592
593 pull_request = PullRequest.get(pull_request_id)
593 pull_request = PullRequest.get(pull_request_id)
594 assert pull_request.is_closed()
594 assert pull_request.is_closed()
595
595
596 # check only the latest status, not the review status
596 # check only the latest status, not the review status
597 status = ChangesetStatusModel().get_status(
597 status = ChangesetStatusModel().get_status(
598 pull_request.source_repo, pull_request=pull_request)
598 pull_request.source_repo, pull_request=pull_request)
599 assert status == ChangesetStatus.STATUS_REJECTED
599 assert status == ChangesetStatus.STATUS_REJECTED
600
600
601 def test_comment_and_close_pull_request_try_edit_comment(
601 def test_comment_and_close_pull_request_try_edit_comment(
602 self, pr_util, csrf_token, xhr_header
602 self, pr_util, csrf_token, xhr_header
603 ):
603 ):
604 pull_request = pr_util.create_pull_request()
604 pull_request = pr_util.create_pull_request()
605 pull_request_id = pull_request.pull_request_id
605 pull_request_id = pull_request.pull_request_id
606 target_scm = pull_request.target_repo.scm_instance()
606 target_scm = pull_request.target_repo.scm_instance()
607 target_scm_name = target_scm.name
607 target_scm_name = target_scm.name
608
608
609 response = self.app.post(
609 response = self.app.post(
610 route_path(
610 route_path(
611 'pullrequest_comment_create',
611 'pullrequest_comment_create',
612 repo_name=target_scm_name,
612 repo_name=target_scm_name,
613 pull_request_id=pull_request_id,
613 pull_request_id=pull_request_id,
614 ),
614 ),
615 params={
615 params={
616 'close_pull_request': 'true',
616 'close_pull_request': 'true',
617 'csrf_token': csrf_token,
617 'csrf_token': csrf_token,
618 },
618 },
619 extra_environ=xhr_header)
619 extra_environ=xhr_header)
620
620
621 assert response.json
621 assert response.json
622
622
623 pull_request = PullRequest.get(pull_request_id)
623 pull_request = PullRequest.get(pull_request_id)
624 target_scm = pull_request.target_repo.scm_instance()
624 target_scm = pull_request.target_repo.scm_instance()
625 target_scm_name = target_scm.name
625 target_scm_name = target_scm.name
626 assert pull_request.is_closed()
626 assert pull_request.is_closed()
627
627
628 # check only the latest status, not the review status
628 # check only the latest status, not the review status
629 status = ChangesetStatusModel().get_status(
629 status = ChangesetStatusModel().get_status(
630 pull_request.source_repo, pull_request=pull_request)
630 pull_request.source_repo, pull_request=pull_request)
631 assert status == ChangesetStatus.STATUS_REJECTED
631 assert status == ChangesetStatus.STATUS_REJECTED
632
632
633 for comment_id in response.json.keys():
633 for comment_id in response.json.keys():
634 test_text = 'test'
634 test_text = 'test'
635 response = self.app.post(
635 response = self.app.post(
636 route_path(
636 route_path(
637 'pullrequest_comment_edit',
637 'pullrequest_comment_edit',
638 repo_name=target_scm_name,
638 repo_name=target_scm_name,
639 pull_request_id=pull_request_id,
639 pull_request_id=pull_request_id,
640 comment_id=comment_id,
640 comment_id=comment_id,
641 ),
641 ),
642 extra_environ=xhr_header,
642 extra_environ=xhr_header,
643 params={
643 params={
644 'csrf_token': csrf_token,
644 'csrf_token': csrf_token,
645 'text': test_text,
645 'text': test_text,
646 },
646 },
647 status=403,
647 status=403,
648 )
648 )
649 assert response.status_int == 403
649 assert response.status_int == 403
650
650
651 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
651 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
652 pull_request = pr_util.create_pull_request()
652 pull_request = pr_util.create_pull_request()
653 target_scm = pull_request.target_repo.scm_instance()
653 target_scm = pull_request.target_repo.scm_instance()
654 target_scm_name = target_scm.name
654 target_scm_name = target_scm.name
655
655
656 response = self.app.post(
656 response = self.app.post(
657 route_path(
657 route_path(
658 'pullrequest_comment_create',
658 'pullrequest_comment_create',
659 repo_name=target_scm_name,
659 repo_name=target_scm_name,
660 pull_request_id=pull_request.pull_request_id),
660 pull_request_id=pull_request.pull_request_id),
661 params={
661 params={
662 'csrf_token': csrf_token,
662 'csrf_token': csrf_token,
663 'text': 'init',
663 'text': 'init',
664 },
664 },
665 extra_environ=xhr_header,
665 extra_environ=xhr_header,
666 )
666 )
667 assert response.json
667 assert response.json
668
668
669 for comment_id in response.json.keys():
669 for comment_id in response.json.keys():
670 assert comment_id
670 assert comment_id
671 test_text = 'test'
671 test_text = 'test'
672 self.app.post(
672 self.app.post(
673 route_path(
673 route_path(
674 'pullrequest_comment_edit',
674 'pullrequest_comment_edit',
675 repo_name=target_scm_name,
675 repo_name=target_scm_name,
676 pull_request_id=pull_request.pull_request_id,
                    pull_request_id=pull_request.pull_request_id,
                    comment_id=comment_id,
                ),
                extra_environ=xhr_header,
                params={
                    'csrf_token': csrf_token,
                    'text': test_text,
                    'version': '0',
                },
            )
            text_from_db = ChangesetComment.query().filter(
                ChangesetComment.comment_id == comment_id).first().text
            assert test_text == text_from_db

    def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        target_scm = pull_request.target_repo.scm_instance()
        target_scm_name = target_scm.name

        response = self.app.post(
            route_path(
                'pullrequest_comment_create',
                repo_name=target_scm_name,
                pull_request_id=pull_request.pull_request_id),
            params={
                'csrf_token': csrf_token,
                'text': 'init',
            },
            extra_environ=xhr_header,
        )
        assert response.json

        for comment_id in response.json.keys():
            test_text = 'init'
            response = self.app.post(
                route_path(
                    'pullrequest_comment_edit',
                    repo_name=target_scm_name,
                    pull_request_id=pull_request.pull_request_id,
                    comment_id=comment_id,
                ),
                extra_environ=xhr_header,
                params={
                    'csrf_token': csrf_token,
                    'text': test_text,
                    'version': '0',
                },
                status=404,
            )
            assert response.status_int == 404

    def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        target_scm = pull_request.target_repo.scm_instance()
        target_scm_name = target_scm.name

        response = self.app.post(
            route_path(
                'pullrequest_comment_create',
                repo_name=target_scm_name,
                pull_request_id=pull_request.pull_request_id),
            params={
                'csrf_token': csrf_token,
                'text': 'init',
            },
            extra_environ=xhr_header,
        )
        assert response.json
        for comment_id in response.json.keys():
            test_text = 'test'
            self.app.post(
                route_path(
                    'pullrequest_comment_edit',
                    repo_name=target_scm_name,
                    pull_request_id=pull_request.pull_request_id,
                    comment_id=comment_id,
                ),
                extra_environ=xhr_header,
                params={
                    'csrf_token': csrf_token,
                    'text': test_text,
                    'version': '0',
                },
            )
            test_text_v2 = 'test_v2'
            response = self.app.post(
                route_path(
                    'pullrequest_comment_edit',
                    repo_name=target_scm_name,
                    pull_request_id=pull_request.pull_request_id,
                    comment_id=comment_id,
                ),
                extra_environ=xhr_header,
                params={
                    'csrf_token': csrf_token,
                    'text': test_text_v2,
                    'version': '0',
                },
                status=409,
            )
            assert response.status_int == 409

            text_from_db = ChangesetComment.query().filter(
                ChangesetComment.comment_id == comment_id).first().text

            assert test_text == text_from_db
            assert test_text_v2 != text_from_db

    def test_comment_and_comment_edit_permissions_forbidden(
            self, autologin_regular_user, user_regular, user_admin, pr_util,
            csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)
        comment = CommentsModel().create(
            text='test',
            repo=pull_request.target_repo.scm_instance().name,
            user=user_admin,
            pull_request=pull_request,
        )
        response = self.app.post(
            route_path(
                'pullrequest_comment_edit',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=comment.comment_id,
            ),
            extra_environ=xhr_header,
            params={
                'csrf_token': csrf_token,
                'text': 'test_text',
            },
            status=403,
        )
        assert response.status_int == 403

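    # NOTE: the ('__start__', ...) / ('__end__', ...) tuples posted in the tests
    # below mirror the nested sequence/mapping structure of the PR creation form,
    # so reviewers, rules and revisions are decoded as nested data rather than
    # flat fields.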
    def test_create_pull_request(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we now have both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

    def test_reviewer_notifications(self, backend, csrf_token):
        # We have to use app.post for this test so that the notifications are
        # created properly along with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "requested a pull request review. !%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [
                (1, [], False, 'reviewer', [])
            ],
            pull_request.author)
        assert len(notifications.all()) == 2

    def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        response.mustcontain(no=['content_of_ancestor'])
        response.mustcontain(no=['content_of_ancestor-child'])
        response.mustcontain('content_of_change')

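    # The merge tests below exercise the 'pullrequest_merge' endpoint under
    # different conditions (mergeable, merging disabled, not approved, failing merge).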
    def test_merge_pull_request_enabled(self, pr_util, csrf_token):
        # Clear any previous calls to rcextensions
        rhodecode.EXTENSIONS.calls.clear()

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        url = route_path('pullrequest_merge',
                         repo_name=repo_name,
                         pull_request_id=pull_request_id)
        response = self.app.post(url, params={'csrf_token': csrf_token}).follow()

        pull_request = PullRequest.get(pull_request_id)

        assert response.status_int == 200
        assert pull_request.is_closed()
        assert_pull_request_status(
            pull_request, ChangesetStatus.STATUS_APPROVED)

        # Check the relevant log entries were added
        user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
        actions = [log.action for log in user_logs]
        pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        expected_actions = [
            u'repo.pull_request.close',
            u'repo.pull_request.merge',
            u'repo.pull_request.comment.create'
        ]
        assert actions == expected_actions

        user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
        actions = [log for log in user_logs]
        assert actions[-1].action == 'user.push'
        assert actions[-1].action_data['commit_ids'] == pr_commit_ids

        # Check post_push rcextension was really executed
        push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
        assert len(push_calls) == 1
        unused_last_call_args, last_call_kwargs = push_calls[0]
        assert last_call_kwargs['action'] == 'push'
        assert last_call_kwargs['commit_ids'] == pr_commit_ids

    def test_merge_pull_request_disabled(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=False)
        pull_request_id = pull_request.pull_request_id
        pull_request = PullRequest.get(pull_request_id)

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200
        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Server-side pull request merging is disabled.')

    @pytest.mark.skip_backends('svn')
    def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200

        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Pull request reviewer approval is pending.')

    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
                                   MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=merge_resp),
            merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

            merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
                                       metadata={'target': 'shadow repo',
                                                 'merge_commit': 'xxx'})
            assert_session_flash(response, merge_resp.merge_status_message)

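    # The update tests below build a PullRequest directly through the model and
    # then call 'pullrequest_update' with update_commits=true, checking how the
    # stored revisions and target reference change.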
    def test_update_source_revision(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])
        target_repo_name = target.repo_name

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target_repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target_repo_name,
                       pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 1 added, 0 removed commits.')

        # check that we now have both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]

    def test_update_target_revision(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED

        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])
        target_repo_name = target.repo_name

        # update PR
        url = route_path('pullrequest_update',
                         repo_name=target_repo_name,
                         pull_request_id=pull_request_id)
        self.app.post(url,
                      params={'update_commits': 'true', 'csrf_token': csrf_token},
                      status=200)

        # check that the PR now tracks only the rebased revision
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])

        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target_repo_name,
                       pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 1 added, 1 removed commits.')

    def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
        backend = backend_git
        commits = [
            {'message': 'master-commit-1'},
            {'message': 'master-commit-2-change-1'},
            {'message': 'master-commit-3-change-2'},

            {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
            {'message': 'feat-commit-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['master-commit-3-change-2'])
        source = backend.create_repo(heads=['feat-commit-2'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source

        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['master-commit-3-change-2'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])

        pull_request.revisions = [
            commit_ids['feat-commit-1'],
            commit_ids['feat-commit-2']
        ]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # PR is created, now we simulate a force-push into target
        # that drops the last 2 commits
        vcsrepo = target.scm_instance()
        vcsrepo.config.clear_section('hooks')
        vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
        target_repo_name = target.repo_name

        # update PR
        url = route_path('pullrequest_update',
                         repo_name=target_repo_name,
                         pull_request_id=pull_request_id)
        self.app.post(url,
                      params={'update_commits': 'true', 'csrf_token': csrf_token},
                      status=200)

        response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 0 added, 0 removed commits.')

    def test_update_of_ancestor_reference(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
            {'message': 'ancestor-new', 'parents': ['ancestor']},
            {'message': 'change-rebased'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()
        pull_request.source_repo = source

        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])
        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # target has ancestor - ancestor-new
        # source has ancestor - ancestor-new - change-rebased
        backend.pull_heads(target, heads=['ancestor-new'])
        backend.pull_heads(source, heads=['change-rebased'])
        target_repo_name = target.repo_name

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target_repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token},
            status=200)

        # Expect the target reference to be updated correctly
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-rebased']]
        expected_target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name,
            commit_id=commit_ids['ancestor-new'])
        assert pull_request.target_ref == expected_target_ref

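    # The tests below remove or strip commits referenced by a pull request and
    # verify that the UI reports the missing commits instead of failing.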
    def test_remove_pull_request_branch(self, backend_git, csrf_token):
        branch_name = 'development'
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'branch': branch_name},
        ]
        repo = backend_git.create_repo(commits)
        repo_name = repo.repo_name
        commit_ids = backend_git.commit_ids

        pull_request = PullRequest()
        pull_request.source_repo = repo
        pull_request.target_repo = repo
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=branch_name, commit_id=commit_ids['new-feature'])
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
        pull_request.revisions = [commit_ids['new-feature']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id

        vcs = repo.scm_instance()
        vcs.remove_ref('refs/heads/{}'.format(branch_name))
        # NOTE(marcink): run GC to ensure the commits are gone
        vcs.run_gc()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=repo_name,
            pull_request_id=pull_request_id))

        assert response.status_int == 200

        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert',
            'This pull request cannot be displayed, because one or more'
            ' commits no longer exist in the source repository.')

    def test_strip_commits_from_pull_request(
            self, backend, pr_util, csrf_token):
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'parents': ['initial-commit']},
        ]
        pull_request = pr_util.create_pull_request(
            commits, target_head='initial-commit', source_head='new-feature',
            revisions=['new-feature'])

        vcs = pr_util.source_repository.scm_instance()
        if backend.alias == 'git':
            vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
        else:
            vcs.strip(pr_util.commit_ids['new-feature'])

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pr_util.target_repository.repo_name,
            pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200

        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')
        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert',
            'This pull request cannot be displayed, because one or more'
            ' commits no longer exist in the source repository.')
        response.assert_response().element_contains(
            '#update_commits',
            'Update commits')

    def test_strip_commits_and_update(
            self, backend, pr_util, csrf_token):
        commits = [
            {'message': 'initial-commit'},
            {'message': 'old-feature'},
            {'message': 'new-feature', 'parents': ['old-feature']},
        ]
        pull_request = pr_util.create_pull_request(
            commits, target_head='old-feature', source_head='new-feature',
            revisions=['new-feature'], mergeable=True)
        pr_id = pull_request.pull_request_id
        target_repo_name = pull_request.target_repo.repo_name

        vcs = pr_util.source_repository.scm_instance()
        if backend.alias == 'git':
            vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
        else:
            vcs.strip(pr_util.commit_ids['new-feature'])

        url = route_path('pullrequest_update',
                         repo_name=target_repo_name,
                         pull_request_id=pr_id)
        response = self.app.post(url,
                                 params={'update_commits': 'true',
                                         'csrf_token': csrf_token})

        assert response.status_int == 200
        assert json.loads(response.body) == json.loads('{"response": true, "redirect_url": null}')

        # Make sure that after update, it won't raise 500 errors
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=target_repo_name,
            pull_request_id=pr_id))

        assert response.status_int == 200
        response.assert_response().element_contains(
            '#changeset_compare_view_content .alert strong',
            'Missing commits')

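    # The tests below check how source/target refs are rendered on the summary
    # page: branch refs become links, bookmark and tag refs stay plain text.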
    def test_branch_is_a_link(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:origin:1234567890abcdef'
        pull_request.target_ref = 'branch:target:abcdef1234567890'
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200

        source = response.assert_response().get_element('.pr-source-info')
        source_parent = source.getparent()
        assert len(source_parent) == 1

        target = response.assert_response().get_element('.pr-target-info')
        target_parent = target.getparent()
        assert len(target_parent) == 1

        expected_origin_link = route_path(
            'repo_commits',
            repo_name=pull_request.source_repo.scm_instance().name,
            params=dict(branch='origin'))
        expected_target_link = route_path(
            'repo_commits',
            repo_name=pull_request.target_repo.scm_instance().name,
            params=dict(branch='target'))
        assert source_parent.attrib['href'] == expected_origin_link
        assert target_parent.attrib['href'] == expected_target_link

    def test_bookmark_is_not_a_link(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
        pull_request.target_ref = 'bookmark:target:abcdef1234567890'
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200

        source = response.assert_response().get_element('.pr-source-info')
        assert source.text.strip() == 'bookmark:origin'
        assert source.getparent().attrib.get('href') is None

        target = response.assert_response().get_element('.pr-target-info')
        assert target.text.strip() == 'bookmark:target'
        assert target.getparent().attrib.get('href') is None

    def test_tag_is_not_a_link(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'tag:origin:1234567890abcdef'
        pull_request.target_ref = 'tag:target:abcdef1234567890'
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200

        source = response.assert_response().get_element('.pr-source-info')
        assert source.text.strip() == 'tag:origin'
        assert source.getparent().attrib.get('href') is None

        target = response.assert_response().get_element('.pr-target-info')
        assert target.text.strip() == 'tag:target'
        assert target.getparent().attrib.get('href') is None

    @pytest.mark.parametrize('mergeable', [True, False])
    def test_shadow_repository_link(
            self, mergeable, pr_util, http_host_only_stub):
        """
        Check that the pull request summary page displays a link to the shadow
        repository if the pull request is mergeable. If it is not mergeable
        the link should not be displayed.
        """
        pull_request = pr_util.create_pull_request(
            mergeable=mergeable, enable_notifications=False)
        target_repo = pull_request.target_repo.scm_instance()
1532 target_repo = pull_request.target_repo.scm_instance()
1533 pr_id = pull_request.pull_request_id
1533 pr_id = pull_request.pull_request_id
1534 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1534 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1535 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1535 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1536
1536
1537 response = self.app.get(route_path(
1537 response = self.app.get(route_path(
1538 'pullrequest_show',
1538 'pullrequest_show',
1539 repo_name=target_repo.name,
1539 repo_name=target_repo.name,
1540 pull_request_id=pr_id))
1540 pull_request_id=pr_id))
1541
1541
1542 if mergeable:
1542 if mergeable:
1543 response.assert_response().element_value_contains(
1543 response.assert_response().element_value_contains(
1544 'input.pr-mergeinfo', shadow_url)
1544 'input.pr-mergeinfo', shadow_url)
1545 response.assert_response().element_value_contains(
1545 response.assert_response().element_value_contains(
1546 'input.pr-mergeinfo ', 'pr-merge')
1546 'input.pr-mergeinfo ', 'pr-merge')
1547 else:
1547 else:
1548 response.assert_response().no_element_exists('.pr-mergeinfo')
1548 response.assert_response().no_element_exists('.pr-mergeinfo')
1549
1549
1550
1550
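A minimal sketch of what an assertion such as element_value_contains could look like if implemented with lxml (assuming the cssselect extra is available); the helper name and the internals of the real assert_response utility are assumptions, shown only to clarify what the test checks:

from lxml import html

def element_value_contains_sketch(response_body, css_selector, expected_fragment):
    # parse the rendered page and require at least one matching element whose
    # value attribute contains the expected fragment (e.g. the shadow repo URL)
    doc = html.fromstring(response_body)
    elements = doc.cssselect(css_selector)
    assert elements, 'no element matches %s' % css_selector
    assert any(expected_fragment in (el.get('value') or '') for el in elements)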
1551 @pytest.mark.usefixtures('app')
1551 @pytest.mark.usefixtures('app')
1552 @pytest.mark.backends("git", "hg")
1552 @pytest.mark.backends("git", "hg")
1553 class TestPullrequestsControllerDelete(object):
1553 class TestPullrequestsControllerDelete(object):
1554 def test_pull_request_delete_button_permissions_admin(
1554 def test_pull_request_delete_button_permissions_admin(
1555 self, autologin_user, user_admin, pr_util):
1555 self, autologin_user, user_admin, pr_util):
1556 pull_request = pr_util.create_pull_request(
1556 pull_request = pr_util.create_pull_request(
1557 author=user_admin.username, enable_notifications=False)
1557 author=user_admin.username, enable_notifications=False)
1558
1558
1559 response = self.app.get(route_path(
1559 response = self.app.get(route_path(
1560 'pullrequest_show',
1560 'pullrequest_show',
1561 repo_name=pull_request.target_repo.scm_instance().name,
1561 repo_name=pull_request.target_repo.scm_instance().name,
1562 pull_request_id=pull_request.pull_request_id))
1562 pull_request_id=pull_request.pull_request_id))
1563
1563
1564 response.mustcontain('id="delete_pullrequest"')
1564 response.mustcontain('id="delete_pullrequest"')
1565 response.mustcontain('Confirm to delete this pull request')
1565 response.mustcontain('Confirm to delete this pull request')
1566
1566
1567 def test_pull_request_delete_button_permissions_owner(
1567 def test_pull_request_delete_button_permissions_owner(
1568 self, autologin_regular_user, user_regular, pr_util):
1568 self, autologin_regular_user, user_regular, pr_util):
1569 pull_request = pr_util.create_pull_request(
1569 pull_request = pr_util.create_pull_request(
1570 author=user_regular.username, enable_notifications=False)
1570 author=user_regular.username, enable_notifications=False)
1571
1571
1572 response = self.app.get(route_path(
1572 response = self.app.get(route_path(
1573 'pullrequest_show',
1573 'pullrequest_show',
1574 repo_name=pull_request.target_repo.scm_instance().name,
1574 repo_name=pull_request.target_repo.scm_instance().name,
1575 pull_request_id=pull_request.pull_request_id))
1575 pull_request_id=pull_request.pull_request_id))
1576
1576
1577 response.mustcontain('id="delete_pullrequest"')
1577 response.mustcontain('id="delete_pullrequest"')
1578 response.mustcontain('Confirm to delete this pull request')
1578 response.mustcontain('Confirm to delete this pull request')
1579
1579
1580 def test_pull_request_delete_button_permissions_forbidden(
1580 def test_pull_request_delete_button_permissions_forbidden(
1581 self, autologin_regular_user, user_regular, user_admin, pr_util):
1581 self, autologin_regular_user, user_regular, user_admin, pr_util):
1582 pull_request = pr_util.create_pull_request(
1582 pull_request = pr_util.create_pull_request(
1583 author=user_admin.username, enable_notifications=False)
1583 author=user_admin.username, enable_notifications=False)
1584
1584
1585 response = self.app.get(route_path(
1585 response = self.app.get(route_path(
1586 'pullrequest_show',
1586 'pullrequest_show',
1587 repo_name=pull_request.target_repo.scm_instance().name,
1587 repo_name=pull_request.target_repo.scm_instance().name,
1588 pull_request_id=pull_request.pull_request_id))
1588 pull_request_id=pull_request.pull_request_id))
1589 response.mustcontain(no=['id="delete_pullrequest"'])
1589 response.mustcontain(no=['id="delete_pullrequest"'])
1590 response.mustcontain(no=['Confirm to delete this pull request'])
1590 response.mustcontain(no=['Confirm to delete this pull request'])
1591
1591
1592 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1592 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1593 self, autologin_regular_user, user_regular, user_admin, pr_util,
1593 self, autologin_regular_user, user_regular, user_admin, pr_util,
1594 user_util):
1594 user_util):
1595
1595
1596 pull_request = pr_util.create_pull_request(
1596 pull_request = pr_util.create_pull_request(
1597 author=user_admin.username, enable_notifications=False)
1597 author=user_admin.username, enable_notifications=False)
1598
1598
1599 user_util.grant_user_permission_to_repo(
1599 user_util.grant_user_permission_to_repo(
1600 pull_request.target_repo, user_regular,
1600 pull_request.target_repo, user_regular,
1601 'repository.write')
1601 'repository.write')
1602
1602
1603 response = self.app.get(route_path(
1603 response = self.app.get(route_path(
1604 'pullrequest_show',
1604 'pullrequest_show',
1605 repo_name=pull_request.target_repo.scm_instance().name,
1605 repo_name=pull_request.target_repo.scm_instance().name,
1606 pull_request_id=pull_request.pull_request_id))
1606 pull_request_id=pull_request.pull_request_id))
1607
1607
1608 response.mustcontain('id="open_edit_pullrequest"')
1608 response.mustcontain('id="open_edit_pullrequest"')
1609 response.mustcontain('id="delete_pullrequest"')
1609 response.mustcontain('id="delete_pullrequest"')
1610 response.mustcontain(no=['Confirm to delete this pull request'])
1610 response.mustcontain(no=['Confirm to delete this pull request'])
1611
1611
1612 def test_delete_comment_returns_404_if_comment_does_not_exist(
1612 def test_delete_comment_returns_404_if_comment_does_not_exist(
1613 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1613 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1614
1614
1615 pull_request = pr_util.create_pull_request(
1615 pull_request = pr_util.create_pull_request(
1616 author=user_admin.username, enable_notifications=False)
1616 author=user_admin.username, enable_notifications=False)
1617
1617
1618 self.app.post(
1618 self.app.post(
1619 route_path(
1619 route_path(
1620 'pullrequest_comment_delete',
1620 'pullrequest_comment_delete',
1621 repo_name=pull_request.target_repo.scm_instance().name,
1621 repo_name=pull_request.target_repo.scm_instance().name,
1622 pull_request_id=pull_request.pull_request_id,
1622 pull_request_id=pull_request.pull_request_id,
1623 comment_id=1024404),
1623 comment_id=1024404),
1624 extra_environ=xhr_header,
1624 extra_environ=xhr_header,
1625 params={'csrf_token': csrf_token},
1625 params={'csrf_token': csrf_token},
1626 status=404
1626 status=404
1627 )
1627 )
1628
1628
1629 def test_delete_comment(
1629 def test_delete_comment(
1630 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1630 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1631
1631
1632 pull_request = pr_util.create_pull_request(
1632 pull_request = pr_util.create_pull_request(
1633 author=user_admin.username, enable_notifications=False)
1633 author=user_admin.username, enable_notifications=False)
1634 comment = pr_util.create_comment()
1634 comment = pr_util.create_comment()
1635 comment_id = comment.comment_id
1635 comment_id = comment.comment_id
1636
1636
1637 response = self.app.post(
1637 response = self.app.post(
1638 route_path(
1638 route_path(
1639 'pullrequest_comment_delete',
1639 'pullrequest_comment_delete',
1640 repo_name=pull_request.target_repo.scm_instance().name,
1640 repo_name=pull_request.target_repo.scm_instance().name,
1641 pull_request_id=pull_request.pull_request_id,
1641 pull_request_id=pull_request.pull_request_id,
1642 comment_id=comment_id),
1642 comment_id=comment_id),
1643 extra_environ=xhr_header,
1643 extra_environ=xhr_header,
1644 params={'csrf_token': csrf_token},
1644 params={'csrf_token': csrf_token},
1645 status=200
1645 status=200
1646 )
1646 )
1647 assert response.body == 'true'
1647 assert response.text == 'true'
1648
1648
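The change from response.body to response.text above is one of the recurring Python 3 fixes in this changeset: WebTest exposes the raw payload as bytes via .body and the decoded string via .text. A small illustration with a hypothetical payload:

payload = b'true'                          # what response.body holds (bytes)
assert payload == b'true'                  # bytes only compare equal to bytes
assert payload.decode('utf-8') == 'true'   # response.text yields the decoded str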
1649 @pytest.mark.parametrize('url_type', [
1649 @pytest.mark.parametrize('url_type', [
1650 'pullrequest_new',
1650 'pullrequest_new',
1651 'pullrequest_create',
1651 'pullrequest_create',
1652 'pullrequest_update',
1652 'pullrequest_update',
1653 'pullrequest_merge',
1653 'pullrequest_merge',
1654 ])
1654 ])
1655 def test_pull_request_is_forbidden_on_archived_repo(
1655 def test_pull_request_is_forbidden_on_archived_repo(
1656 self, autologin_user, backend, xhr_header, user_util, url_type):
1656 self, autologin_user, backend, xhr_header, user_util, url_type):
1657
1657
1658 # create a temporary repo
1658 # create a temporary repo
1659 source = user_util.create_repo(repo_type=backend.alias)
1659 source = user_util.create_repo(repo_type=backend.alias)
1660 repo_name = source.repo_name
1660 repo_name = source.repo_name
1661 repo = Repository.get_by_repo_name(repo_name)
1661 repo = Repository.get_by_repo_name(repo_name)
1662 repo.archived = True
1662 repo.archived = True
1663 Session().commit()
1663 Session().commit()
1664
1664
1665 response = self.app.get(
1665 response = self.app.get(
1666 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1666 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1667
1667
1668 msg = 'Action not supported for archived repository.'
1668 msg = 'Action not supported for archived repository.'
1669 assert_session_flash(response, msg)
1669 assert_session_flash(response, msg)
1670
1670
1671
1671
1672 def assert_pull_request_status(pull_request, expected_status):
1672 def assert_pull_request_status(pull_request, expected_status):
1673 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1673 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1674 assert status == expected_status
1674 assert status == expected_status
1675
1675
1676
1676
1677 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1677 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1678 @pytest.mark.usefixtures("autologin_user")
1678 @pytest.mark.usefixtures("autologin_user")
1679 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1679 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1680 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
1680 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,255 +1,251 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import time
22 import time
23 import logging
23 import logging
24 import datetime
24 import datetime
25 import hashlib
26 import tempfile
25 import tempfile
27 from os.path import join as jn
26 from os.path import join as jn
28
27 import urllib.parse
29 from tempfile import _RandomNameSequence
30
28
31 import pytest
29 import pytest
32
30
33 from rhodecode.model.db import User
31 from rhodecode.model.db import User
34 from rhodecode.lib import auth
32 from rhodecode.lib import auth
35 from rhodecode.lib import helpers as h
33 from rhodecode.lib import helpers as h
36 from rhodecode.lib.helpers import flash
34 from rhodecode.lib.helpers import flash
37 from rhodecode.lib.utils2 import safe_str
35 from rhodecode.lib.str_utils import safe_str
38
36 from rhodecode.lib.hash_utils import sha1_safe
39
37
40 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
41
39
42 __all__ = [
40 __all__ = [
43 'get_new_dir', 'TestController', 'route_path_generator',
41 'get_new_dir', 'TestController', 'route_path_generator',
44 'clear_cache_regions',
42 'clear_cache_regions',
45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
43 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
44 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 'NEW_HG_REPO', 'NEW_GIT_REPO',
45 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
46 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
47 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
48 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
49 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
50 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
51 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 ]
52 ]
55
53
56
54
57 # SOME GLOBALS FOR TESTS
55 # SOME GLOBALS FOR TESTS
58 TEST_DIR = tempfile.gettempdir()
56 TEST_DIR = tempfile.gettempdir()
59
57
60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_{}'.format(next(_RandomNameSequence())))
58 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_{}'.format(next(tempfile._RandomNameSequence())))
61 TEST_USER_ADMIN_LOGIN = 'test_admin'
59 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 TEST_USER_ADMIN_PASS = 'test12'
60 TEST_USER_ADMIN_PASS = 'test12'
63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
61 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64
62
65 TEST_USER_REGULAR_LOGIN = 'test_regular'
63 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 TEST_USER_REGULAR_PASS = 'test12'
64 TEST_USER_REGULAR_PASS = 'test12'
67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
65 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68
66
69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
67 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 TEST_USER_REGULAR2_PASS = 'test12'
68 TEST_USER_REGULAR2_PASS = 'test12'
71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
69 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72
70
73 HG_REPO = 'vcs_test_hg'
71 HG_REPO = 'vcs_test_hg'
74 GIT_REPO = 'vcs_test_git'
72 GIT_REPO = 'vcs_test_git'
75 SVN_REPO = 'vcs_test_svn'
73 SVN_REPO = 'vcs_test_svn'
76
74
77 NEW_HG_REPO = 'vcs_test_hg_new'
75 NEW_HG_REPO = 'vcs_test_hg_new'
78 NEW_GIT_REPO = 'vcs_test_git_new'
76 NEW_GIT_REPO = 'vcs_test_git_new'
79
77
80 HG_FORK = 'vcs_test_hg_fork'
78 HG_FORK = 'vcs_test_hg_fork'
81 GIT_FORK = 'vcs_test_git_fork'
79 GIT_FORK = 'vcs_test_git_fork'
82
80
83 ## VCS
81 ## VCS
84 SCM_TESTS = ['hg', 'git']
82 SCM_TESTS = ['hg', 'git']
85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
83 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86
84
87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
85 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
86 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcsgitclone{uniq_suffix}')
89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
87 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, f'vcsgitpull{uniq_suffix}')
90
88
91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
89 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
90 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, f'vcshgclone{uniq_suffix}')
93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
91 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, f'vcshgpull{uniq_suffix}')
94
92
95 TEST_REPO_PREFIX = 'vcs-test'
93 TEST_REPO_PREFIX = 'vcs-test'
96
94
97
95
98 def clear_cache_regions(regions=None):
96 def clear_cache_regions(regions=None):
99 # dogpile
97 # dogpile
100 from rhodecode.lib.rc_cache import region_meta
98 from rhodecode.lib.rc_cache import region_meta
101 for region_name, region in region_meta.dogpile_cache_regions.items():
99 for region_name, region in region_meta.dogpile_cache_regions.items():
102 if not regions or region_name in regions:
100 if not regions or region_name in regions:
103 region.invalidate()
101 region.invalidate()
104
102
105
103
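Typical usage of clear_cache_regions, sketched under the assumption that dogpile regions with these names are configured; calling it without arguments invalidates every region:

clear_cache_regions()                                        # invalidate all regions
clear_cache_regions(regions=['cache_repo', 'cache_perms'])   # or only selected ones (names illustrative)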
106 def get_new_dir(title):
104 def get_new_dir(title):
107 """
105 """
108 Always returns a new directory path.
106 Always returns a new directory path.
109 """
107 """
110 from rhodecode.tests.vcs.utils import get_normalized_path
108 from rhodecode.tests.vcs.utils import get_normalized_path
111 name_parts = [TEST_REPO_PREFIX]
109 name_parts = [TEST_REPO_PREFIX]
112 if title:
110 if title:
113 name_parts.append(title)
111 name_parts.append(title)
114 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
112 hex_str = sha1_safe(f'{os.getpid()} {time.time()}')
115 name_parts.append(hex_str)
113 name_parts.append(hex_str)
116 name = '-'.join(name_parts)
114 name = '-'.join(name_parts)
117 path = os.path.join(TEST_DIR, name)
115 path = os.path.join(TEST_DIR, name)
118 return get_normalized_path(path)
116 return get_normalized_path(path)
119
117
120
118
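The switch from hashlib.sha1('...') to sha1_safe(f'...') is another Python 3 fix: hashlib.sha1() rejects str input. A plausible sketch of what sha1_safe does (the real implementation lives in rhodecode.lib.hash_utils and may differ):

import hashlib

def sha1_safe_sketch(value):
    # encode text to bytes before hashing; passing str raises TypeError on Python 3
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.sha1(value).hexdigest()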
121 def repo_id_generator(name):
119 def repo_id_generator(name):
122 numeric_hash = 0
120 numeric_hash = 0
123 for char in name:
121 for char in name:
124 numeric_hash += (ord(char))
122 numeric_hash += (ord(char))
125 return numeric_hash
123 return numeric_hash
126
124
127
125
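repo_id_generator simply sums the code points of the name, so the same name always yields the same numeric id; for example:

assert repo_id_generator('abc') == ord('a') + ord('b') + ord('c')   # 97 + 98 + 99 == 294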
128 @pytest.mark.usefixtures('app', 'index_location')
126 @pytest.mark.usefixtures('app', 'index_location')
129 class TestController(object):
127 class TestController(object):
130
128
131 maxDiff = None
129 maxDiff = None
132
130
133 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
131 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
134 password=TEST_USER_ADMIN_PASS):
132 password=TEST_USER_ADMIN_PASS):
135 self._logged_username = username
133 self._logged_username = username
136 self._session = login_user_session(self.app, username, password)
134 self._session = login_user_session(self.app, username, password)
137 self.csrf_token = auth.get_csrf_token(self._session)
135 self.csrf_token = auth.get_csrf_token(self._session)
138
136
139 return self._session['rhodecode_user']
137 return self._session['rhodecode_user']
140
138
141 def logout_user(self):
139 def logout_user(self):
142 logout_user_session(self.app, auth.get_csrf_token(self._session))
140 logout_user_session(self.app, auth.get_csrf_token(self._session))
143 self.csrf_token = None
141 self.csrf_token = None
144 self._logged_username = None
142 self._logged_username = None
145 self._session = None
143 self._session = None
146
144
147 def _get_logged_user(self):
145 def _get_logged_user(self):
148 return User.get_by_username(self._logged_username)
146 return User.get_by_username(self._logged_username)
149
147
150
148
151 def login_user_session(
149 def login_user_session(
152 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
150 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
153
151
154 response = app.post(
152 response = app.post(
155 h.route_path('login'),
153 h.route_path('login'),
156 {'username': username, 'password': password})
154 {'username': username, 'password': password})
157 if 'invalid user name' in response.body:
155 if 'invalid user name' in response.text:
158 pytest.fail('could not login using %s %s' % (username, password))
156 pytest.fail(f'could not login using {username} {password}')
159
157
160 assert response.status == '302 Found'
158 assert response.status == '302 Found'
161 response = response.follow()
159 response = response.follow()
162 assert response.status == '200 OK'
160 assert response.status == '200 OK'
163
161
164 session = response.get_session_from_response()
162 session = response.get_session_from_response()
165 assert 'rhodecode_user' in session
163 assert 'rhodecode_user' in session
166 rc_user = session['rhodecode_user']
164 rc_user = session['rhodecode_user']
167 assert rc_user.get('username') == username
165 assert rc_user.get('username') == username
168 assert rc_user.get('is_authenticated')
166 assert rc_user.get('is_authenticated')
169
167
170 return session
168 return session
171
169
172
170
173 def logout_user_session(app, csrf_token):
171 def logout_user_session(app, csrf_token):
174 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
172 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
175
173
176
174
177 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
175 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
178 password=TEST_USER_ADMIN_PASS):
176 password=TEST_USER_ADMIN_PASS):
179 return login_user_session(app, username, password)['rhodecode_user']
177 return login_user_session(app, username, password)['rhodecode_user']
180
178
181
179
182 def assert_session_flash(response, msg=None, category=None, no_=None):
180 def assert_session_flash(response, msg=None, category=None, no_=None):
183 """
181 """
184 Assert on a flash message in the current session.
182 Assert on a flash message in the current session.
185
183
186 :param response: Response from the given call; it will contain flash
184 :param response: Response from the given call; it will contain flash
187 messages or a session bound to them.
185 messages or a session bound to them.
188 :param msg: The expected message. Will be evaluated if a
186 :param msg: The expected message. Will be evaluated if a
189 :class:`LazyString` is passed in.
187 :class:`LazyString` is passed in.
190 :param category: Optional. If passed, the message category will be
188 :param category: Optional. If passed, the message category will be
191 checked as well.
189 checked as well.
192 :param no_: Optional. If passed, the message will be checked to NOT
190 :param no_: Optional. If passed, the message will be checked to NOT
193 be in the flash session
191 be in the flash session
194 """
192 """
195 if msg is None and no_ is None:
193 if msg is None and no_ is None:
196 raise ValueError("Parameter msg or no_ is required.")
194 raise ValueError("Parameter msg or no_ is required.")
197
195
198 if msg and no_:
196 if msg and no_:
199 raise ValueError("Please specify either msg or no_, but not both")
197 raise ValueError("Please specify either msg or no_, but not both")
200
198
201 session = response.get_session_from_response()
199 session = response.get_session_from_response()
202 messages = flash.pop_messages(session=session)
200 messages = flash.pop_messages(session=session)
203 msg = _eval_if_lazy(msg)
201 msg = _eval_if_lazy(msg)
204
202
205 if no_:
203 if no_:
206 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
204 error_msg = f'unable to detect no_ message `{no_}` in empty flash list'
207 else:
205 else:
208 error_msg = 'unable to find message `%s` in empty flash list' % msg
206 error_msg = f'unable to find message `{msg}` in empty flash list'
209 assert messages, error_msg
207 assert messages, error_msg
210 message = messages[0]
208 message = messages[0]
211
209
212 message_text = _eval_if_lazy(message.message) or ''
210 message_text = _eval_if_lazy(message.message) or ''
213
211
214 if no_:
212 if no_:
215 if no_ in message_text:
213 if no_ in message_text:
216 msg = u'msg `%s` found in session flash.' % (no_,)
214 msg = f'msg `{no_}` found in session flash.'
217 pytest.fail(safe_str(msg))
215 pytest.fail(safe_str(msg))
218 else:
216 else:
219 if msg not in message_text:
217 if msg not in message_text:
220 fail_msg = u'msg `%s` not found in session ' \
218 fail_msg = f'msg `{msg}` not found in ' \
221 u'flash: got `%s` (type:%s) instead' % (
219 f'session flash: got `{message_text}` (type:{type(message_text)}) instead'
222 msg, message_text, type(message_text))
223
220
224 pytest.fail(safe_str(fail_msg))
221 pytest.fail(safe_str(fail_msg))
225 if category:
222 if category:
226 assert category == message.category
223 assert category == message.category
227
224
228
225
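Typical calls to assert_session_flash; the first message is taken from the archived-repo tests above, the others are illustrative examples:

assert_session_flash(response, 'Action not supported for archived repository.')
assert_session_flash(response, 'Repository created', category='success')
assert_session_flash(response, no_='Error occurred')   # assert the text is absent from the flash queue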
229 def _eval_if_lazy(value):
226 def _eval_if_lazy(value):
230 return value.eval() if hasattr(value, 'eval') else value
227 return value.eval() if hasattr(value, 'eval') else value
231
228
232
229
233 def no_newline_id_generator(test_name):
230 def no_newline_id_generator(test_name):
234 """
231 """
235 Generates a test name without spaces or newline characters. Used for
232 Generates a test name without spaces or newline characters. Used for
236 nicer output of test progress
233 nicer output of test progress
237 """
234 """
238 org_name = test_name
235
239 test_name = safe_str(test_name)\
236 test_name = safe_str(test_name)\
240 .replace('\n', '_N') \
237 .replace('\n', '_N') \
241 .replace('\r', '_N') \
238 .replace('\r', '_N') \
242 .replace('\t', '_T') \
239 .replace('\t', '_T') \
243 .replace(' ', '_S')
240 .replace(' ', '_S')
244
241
245 return test_name or 'test-with-empty-name'
242 return test_name or 'test-with-empty-name'
246
243
247
244
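no_newline_id_generator is meant to be passed as the ids callable of pytest.mark.parametrize, e.g. (a hypothetical test):

@pytest.mark.parametrize('text', ['line one\nline two', 'tab\there'],
                         ids=no_newline_id_generator)
def test_text_is_not_empty(text):
    assert text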
248 def route_path_generator(url_defs, name, params=None, **kwargs):
245 def route_path_generator(url_defs, name, params=None, **kwargs):
249 import urllib.request, urllib.parse, urllib.error
250
246
251 base_url = url_defs[name].format(**kwargs)
247 base_url = url_defs[name].format(**kwargs)
252
248
253 if params:
249 if params:
254 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
250 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
255 return base_url
251 return base_url
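A usage sketch of route_path_generator; the url_defs mapping below is hypothetical and only mirrors the route dictionaries that the individual test modules define:

url_defs = {'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}'}
url = route_path_generator(
    url_defs, 'pullrequest_show', params={'merge': 1},
    repo_name='vcs_test_hg', pull_request_id=1)
assert url == '/vcs_test_hg/pull-request/1?merge=1'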
@@ -1,222 +1,222 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.pyramid_utils import get_app_config
23 from rhodecode.lib.pyramid_utils import get_app_config
24 from rhodecode.tests.fixture import TestINI
24 from rhodecode.tests.fixture import TestINI
25 from rhodecode.tests.server_utils import RcVCSServer
25 from rhodecode.tests.server_utils import RcVCSServer
26
26
27
27
28 @pytest.fixture(scope='session')
28 @pytest.fixture(scope='session')
29 def vcsserver(request, vcsserver_port, vcsserver_factory):
29 def vcsserver(request, vcsserver_port, vcsserver_factory):
30 """
30 """
31 Session scope VCSServer.
31 Session scope VCSServer.
32
32
33 Tests which need the VCSServer have to rely on this fixture in order
33 Tests which need the VCSServer have to rely on this fixture in order
34 to ensure it will be running.
34 to ensure it will be running.
35
35
36 For specific needs, the fixture vcsserver_factory can be used. It allows
36 For specific needs, the fixture vcsserver_factory can be used. It allows
37 adjusting the configuration file for the test run.
37 adjusting the configuration file for the test run.
38
38
39 Command line args:
39 Command line args:
40
40
41 --without-vcsserver: Allows switching this fixture off. You have to
41 --without-vcsserver: Allows switching this fixture off. You have to
42 start the server manually.
42 start the server manually.
43
43
44 --vcsserver-port: Will expect the VCSServer to listen on this port.
44 --vcsserver-port: Will expect the VCSServer to listen on this port.
45 """
45 """
46
46
47 if not request.config.getoption('with_vcsserver'):
47 if not request.config.getoption('with_vcsserver'):
48 return None
48 return None
49
49
50 return vcsserver_factory(
50 return vcsserver_factory(
51 request, vcsserver_port=vcsserver_port)
51 request, vcsserver_port=vcsserver_port)
52
52
53
53
54 @pytest.fixture(scope='session')
54 @pytest.fixture(scope='session')
55 def vcsserver_factory(tmpdir_factory):
55 def vcsserver_factory(tmpdir_factory):
56 """
56 """
57 Use this if you need a running vcsserver with a special configuration.
57 Use this if you need a running vcsserver with a special configuration.
58 """
58 """
59
59
60 def factory(request, overrides=(), vcsserver_port=None,
60 def factory(request, overrides=(), vcsserver_port=None,
61 log_file=None):
61 log_file=None):
62
62
63 if vcsserver_port is None:
63 if vcsserver_port is None:
64 vcsserver_port = get_available_port()
64 vcsserver_port = get_available_port()
65
65
66 overrides = list(overrides)
66 overrides = list(overrides)
67 overrides.append({'server:main': {'port': vcsserver_port}})
67 overrides.append({'server:main': {'port': vcsserver_port}})
68
68
69 option_name = 'vcsserver_config_http'
69 option_name = 'vcsserver_config_http'
70 override_option_name = 'vcsserver_config_override'
70 override_option_name = 'vcsserver_config_override'
71 config_file = get_config(
71 config_file = get_config(
72 request.config, option_name=option_name,
72 request.config, option_name=option_name,
73 override_option_name=override_option_name, overrides=overrides,
73 override_option_name=override_option_name, overrides=overrides,
74 basetemp=tmpdir_factory.getbasetemp().strpath,
74 basetemp=tmpdir_factory.getbasetemp().strpath,
75 prefix='test_vcs_')
75 prefix='test_vcs_')
76
76
77 server = RcVCSServer(config_file, log_file)
77 server = RcVCSServer(config_file, log_file)
78 server.start()
78 server.start()
79
79
80 @request.addfinalizer
80 @request.addfinalizer
81 def cleanup():
81 def cleanup():
82 server.shutdown()
82 server.shutdown()
83
83
84 server.wait_until_ready()
84 server.wait_until_ready()
85 return server
85 return server
86
86
87 return factory
87 return factory
88
88
89
89
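A sketch of how a test could request a specially configured VCSServer via vcsserver_factory; the INI section and option used in the override are illustrative only, while the overrides format follows the {'section': {'option': 'value'}} pattern seen above:

def test_with_custom_vcsserver(request, vcsserver_factory):
    server = vcsserver_factory(
        request,
        overrides=[{'app:main': {'dev.use_echo_app': 'true'}}],  # illustrative override
        log_file='/tmp/vcsserver-test.log')
    assert server is not None  # shut down automatically by the request finalizer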
90 def _use_log_level(config):
90 def _use_log_level(config):
91 level = config.getoption('test_loglevel') or 'critical'
91 level = config.getoption('test_loglevel') or 'critical'
92 return level.upper()
92 return level.upper()
93
93
94
94
95 @pytest.fixture(scope='session')
95 @pytest.fixture(scope='session')
96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
97 option_name = 'pyramid_config'
97 option_name = 'pyramid_config'
98 log_level = _use_log_level(request.config)
98 log_level = _use_log_level(request.config)
99
99
100 overrides = [
100 overrides = [
101 {'server:main': {'port': rcserver_port}},
101 {'server:main': {'port': rcserver_port}},
102 {'app:main': {
102 {'app:main': {
103 'vcs.server': 'localhost:%s' % vcsserver_port,
103 'vcs.server': 'localhost:%s' % vcsserver_port,
104 # johbo: We will always start the VCSServer on our own based on the
104 # johbo: We will always start the VCSServer on our own based on the
105 # fixtures of the test cases. For the test run it must always be
105 # fixtures of the test cases. For the test run it must always be
106 # off in the INI file.
106 # off in the INI file.
107 'vcs.start_server': 'false',
107 'vcs.start_server': 'false',
108
108
109 'vcs.server.protocol': 'http',
109 'vcs.server.protocol': 'http',
110 'vcs.scm_app_implementation': 'http',
110 'vcs.scm_app_implementation': 'http',
111 'vcs.hooks.protocol': 'http',
111 'vcs.hooks.protocol': 'http',
112 'vcs.hooks.host': '127.0.0.1',
112 'vcs.hooks.host': '127.0.0.1',
113 }},
113 }},
114
114
115 {'handler_console': {
115 {'handler_console': {
116 'class': 'StreamHandler',
116 'class': 'StreamHandler',
117 'args': '(sys.stderr,)',
117 'args': '(sys.stderr,)',
118 'level': log_level,
118 'level': log_level,
119 }},
119 }},
120
120
121 ]
121 ]
122
122
123 filename = get_config(
123 filename = get_config(
124 request.config, option_name=option_name,
124 request.config, option_name=option_name,
125 override_option_name='{}_override'.format(option_name),
125 override_option_name='{}_override'.format(option_name),
126 overrides=overrides,
126 overrides=overrides,
127 basetemp=tmpdir_factory.getbasetemp().strpath,
127 basetemp=tmpdir_factory.getbasetemp().strpath,
128 prefix='test_rce_')
128 prefix='test_rce_')
129 return filename
129 return filename
130
130
131
131
132 @pytest.fixture(scope='session')
132 @pytest.fixture(scope='session')
133 def ini_settings(ini_config):
133 def ini_settings(ini_config):
134 ini_path = ini_config
134 ini_path = ini_config
135 return get_app_config(ini_path)
135 return get_app_config(ini_path)
136
136
137
137
138 def get_available_port(min_port=40000, max_port=55555):
138 def get_available_port(min_port=40000, max_port=55555):
139 from rhodecode.lib.utils2 import get_available_port as _get_port
139 from rhodecode.lib.utils2 import get_available_port as _get_port
140 return _get_port(min_port, max_port)
140 return _get_port(min_port, max_port)
141
141
142
142
143 @pytest.fixture(scope='session')
143 @pytest.fixture(scope='session')
144 def rcserver_port(request):
144 def rcserver_port(request):
145 port = get_available_port()
145 port = get_available_port()
146 print('Using rhodecode port {}'.format(port))
146 print(f'Using rhodecode port {port}')
147 return port
147 return port
148
148
149
149
150 @pytest.fixture(scope='session')
150 @pytest.fixture(scope='session')
151 def vcsserver_port(request):
151 def vcsserver_port(request):
152 port = request.config.getoption('--vcsserver-port')
152 port = request.config.getoption('--vcsserver-port')
153 if port is None:
153 if port is None:
154 port = get_available_port()
154 port = get_available_port()
155 print('Using vcsserver port {}'.format(port))
155 print(f'Using vcsserver port {port}')
156 return port
156 return port
157
157
158
158
159 @pytest.fixture(scope='session')
159 @pytest.fixture(scope='session')
160 def available_port_factory():
160 def available_port_factory():
161 """
161 """
162 Returns a callable which returns free port numbers.
162 Returns a callable which returns free port numbers.
163 """
163 """
164 return get_available_port
164 return get_available_port
165
165
166
166
167 @pytest.fixture()
167 @pytest.fixture()
168 def available_port(available_port_factory):
168 def available_port(available_port_factory):
169 """
169 """
170 Gives you one free port for the current test.
170 Gives you one free port for the current test.
171
171
172 Uses "available_port_factory" to retrieve the port.
172 Uses "available_port_factory" to retrieve the port.
173 """
173 """
174 return available_port_factory()
174 return available_port_factory()
175
175
176
176
177 @pytest.fixture(scope='session')
177 @pytest.fixture(scope='session')
178 def testini_factory(tmpdir_factory, ini_config):
178 def testini_factory(tmpdir_factory, ini_config):
179 """
179 """
180 Factory to create an INI file based on TestINI.
180 Factory to create an INI file based on TestINI.
181
181
182 It will make sure to place the INI file in the correct directory.
182 It will make sure to place the INI file in the correct directory.
183 """
183 """
184 basetemp = tmpdir_factory.getbasetemp().strpath
184 basetemp = tmpdir_factory.getbasetemp().strpath
185 return TestIniFactory(basetemp, ini_config)
185 return TestIniFactory(basetemp, ini_config)
186
186
187
187
188 class TestIniFactory(object):
188 class TestIniFactory(object):
189
189
190 def __init__(self, basetemp, template_ini):
190 def __init__(self, basetemp, template_ini):
191 self._basetemp = basetemp
191 self._basetemp = basetemp
192 self._template_ini = template_ini
192 self._template_ini = template_ini
193
193
194 def __call__(self, ini_params, new_file_prefix='test'):
194 def __call__(self, ini_params, new_file_prefix='test'):
195 ini_file = TestINI(
195 ini_file = TestINI(
196 self._template_ini, ini_params=ini_params,
196 self._template_ini, ini_params=ini_params,
197 new_file_prefix=new_file_prefix, dir=self._basetemp)
197 new_file_prefix=new_file_prefix, dir=self._basetemp)
198 result = ini_file.create()
198 result = ini_file.create()
199 return result
199 return result
200
200
201
201
202 def get_config(
202 def get_config(
203 config, option_name, override_option_name, overrides=None,
203 config, option_name, override_option_name, overrides=None,
204 basetemp=None, prefix='test'):
204 basetemp=None, prefix='test'):
205 """
205 """
206 Find a configuration file and apply overrides for the given `prefix`.
206 Find a configuration file and apply overrides for the given `prefix`.
207 """
207 """
208 config_file = (
208 config_file = (
209 config.getoption(option_name) or config.getini(option_name))
209 config.getoption(option_name) or config.getini(option_name))
210 if not config_file:
210 if not config_file:
211 pytest.exit(
211 pytest.exit(
212 "Configuration error, could not extract {}.".format(option_name))
212 "Configuration error, could not extract {}.".format(option_name))
213
213
214 overrides = overrides or []
214 overrides = overrides or []
215 config_override = config.getoption(override_option_name)
215 config_override = config.getoption(override_option_name)
216 if config_override:
216 if config_override:
217 overrides.append(config_override)
217 overrides.append(config_override)
218 temp_ini_file = TestINI(
218 temp_ini_file = TestINI(
219 config_file, ini_params=overrides, new_file_prefix=prefix,
219 config_file, ini_params=overrides, new_file_prefix=prefix,
220 dir=basetemp)
220 dir=basetemp)
221
221
222 return temp_ini_file.create()
222 return temp_ini_file.create()
@@ -1,1724 +1,1725 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
24 import os
23 import os
25 import re
24 import re
26 import pprint
25 import pprint
27 import shutil
26 import shutil
28 import socket
27 import socket
29 import subprocess
28 import subprocess
30 import time
29 import time
31 import uuid
30 import uuid
32 import dateutil.tz
31 import dateutil.tz
33 import logging
32 import logging
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 import rhodecode.lib
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.hash_utils import sha1_safe
60 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.tests import (
63 from rhodecode.tests import (
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
69
70
70 log = logging.getLogger(__name__)
71 log = logging.getLogger(__name__)
71
72
72
73
73 def cmp(a, b):
74 def cmp(a, b):
74 # backport cmp from python2 so we can still use it in the custom code in this module
75 # backport cmp from python2 so we can still use it in the custom code in this module
75 return (a > b) - (a < b)
76 return (a > b) - (a < b)
76
77
78
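Since cmp() no longer exists in Python 3, the backport above preserves the old tri-state contract and still works with functools.cmp_to_key (functools is imported at the top of this module):

assert cmp(1, 2) == -1 and cmp(2, 2) == 0 and cmp(3, 2) == 1
assert sorted([3, 1, 2], key=functools.cmp_to_key(cmp)) == [1, 2, 3]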
77 @pytest.fixture(scope='session', autouse=True)
79 @pytest.fixture(scope='session', autouse=True)
78 def activate_example_rcextensions(request):
80 def activate_example_rcextensions(request):
79 """
81 """
80 Patch in an example rcextensions module which verifies passed in kwargs.
82 Patch in an example rcextensions module which verifies passed in kwargs.
81 """
83 """
82 from rhodecode.config import rcextensions
84 from rhodecode.config import rcextensions
83
85
84 old_extensions = rhodecode.EXTENSIONS
86 old_extensions = rhodecode.EXTENSIONS
85 rhodecode.EXTENSIONS = rcextensions
87 rhodecode.EXTENSIONS = rcextensions
86 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
87
89
88 @request.addfinalizer
90 @request.addfinalizer
89 def cleanup():
91 def cleanup():
90 rhodecode.EXTENSIONS = old_extensions
92 rhodecode.EXTENSIONS = old_extensions
91
93
92
94
93 @pytest.fixture()
95 @pytest.fixture()
94 def capture_rcextensions():
96 def capture_rcextensions():
95 """
97 """
96 Returns the recorded calls to entry points in rcextensions.
98 Returns the recorded calls to entry points in rcextensions.
97 """
99 """
98 calls = rhodecode.EXTENSIONS.calls
100 calls = rhodecode.EXTENSIONS.calls
99 calls.clear()
101 calls.clear()
100 # Note: At this moment, it is still the empty dict, but that will
102 # Note: At this moment, it is still the empty dict, but that will
101 # be filled during the test run and since it is a reference this
103 # be filled during the test run and since it is a reference this
102 # is enough to make it work.
104 # is enough to make it work.
103 return calls
105 return calls
104
106
105
107
106 @pytest.fixture(scope='session')
108 @pytest.fixture(scope='session')
107 def http_environ_session():
109 def http_environ_session():
108 """
110 """
109 Allows using "http_environ" in session scope.
111 Allows using "http_environ" in session scope.
110 """
112 """
111 return plain_http_environ()
113 return plain_http_environ()
112
114
113
115
114 def plain_http_host_stub():
116 def plain_http_host_stub():
115 """
117 """
116 Value of HTTP_HOST in the test run.
118 Value of HTTP_HOST in the test run.
117 """
119 """
118 return 'example.com:80'
120 return 'example.com:80'
119
121
120
122
121 @pytest.fixture()
123 @pytest.fixture()
122 def http_host_stub():
124 def http_host_stub():
123 """
125 """
124 Value of HTTP_HOST in the test run.
126 Value of HTTP_HOST in the test run.
125 """
127 """
126 return plain_http_host_stub()
128 return plain_http_host_stub()
127
129
128
130
129 def plain_http_host_only_stub():
131 def plain_http_host_only_stub():
130 """
132 """
131 Value of HTTP_HOST in the test run.
133 Value of HTTP_HOST in the test run.
132 """
134 """
133 return plain_http_host_stub().split(':')[0]
135 return plain_http_host_stub().split(':')[0]
134
136
135
137
136 @pytest.fixture()
138 @pytest.fixture()
137 def http_host_only_stub():
139 def http_host_only_stub():
138 """
140 """
139 Value of HTTP_HOST in the test run.
141 Value of HTTP_HOST in the test run.
140 """
142 """
141 return plain_http_host_only_stub()
143 return plain_http_host_only_stub()
142
144
143
145
144 def plain_http_environ():
146 def plain_http_environ():
145 """
147 """
146 HTTP extra environ keys.
148 HTTP extra environ keys.
147
149
148 Used by the test application as well as for setting up the pylons
150 Used by the test application as well as for setting up the pylons
149 environment. In the case of the fixture "app" it should be possible
151 environment. In the case of the fixture "app" it should be possible
150 to override this for a specific test case.
152 to override this for a specific test case.
151 """
153 """
152 return {
154 return {
153 'SERVER_NAME': plain_http_host_only_stub(),
155 'SERVER_NAME': plain_http_host_only_stub(),
154 'SERVER_PORT': plain_http_host_stub().split(':')[1],
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
155 'HTTP_HOST': plain_http_host_stub(),
157 'HTTP_HOST': plain_http_host_stub(),
156 'HTTP_USER_AGENT': 'rc-test-agent',
158 'HTTP_USER_AGENT': 'rc-test-agent',
157 'REQUEST_METHOD': 'GET'
159 'REQUEST_METHOD': 'GET'
158 }
160 }
159
161
160
162
161 @pytest.fixture()
163 @pytest.fixture()
162 def http_environ():
164 def http_environ():
163 """
165 """
164 HTTP extra environ keys.
166 HTTP extra environ keys.
165
167
166 Used by the test application as well as for setting up the pylons
168 Used by the test application as well as for setting up the pylons
167 environment. In the case of the fixture "app" it should be possible
169 environment. In the case of the fixture "app" it should be possible
168 to override this for a specific test case.
170 to override this for a specific test case.
169 """
171 """
170 return plain_http_environ()
172 return plain_http_environ()
171
173
172
174
173 @pytest.fixture(scope='session')
175 @pytest.fixture(scope='session')
174 def baseapp(ini_config, vcsserver, http_environ_session):
176 def baseapp(ini_config, vcsserver, http_environ_session):
175 from rhodecode.lib.pyramid_utils import get_app_config
177 from rhodecode.lib.pyramid_utils import get_app_config
176 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
177
179
178 log.info("Using the RhodeCode configuration:{}".format(ini_config))
180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
179 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
180
182
181 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
182 app = make_pyramid_app({'__file__': ini_config}, **settings)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
183
185
184 return app
186 return app
185
187
186
188
187 @pytest.fixture(scope='function')
189 @pytest.fixture(scope='function')
188 def app(request, config_stub, baseapp, http_environ):
190 def app(request, config_stub, baseapp, http_environ):
189 app = CustomTestApp(
191 app = CustomTestApp(
190 baseapp,
192 baseapp,
191 extra_environ=http_environ)
193 extra_environ=http_environ)
192 if request.cls:
194 if request.cls:
193 request.cls.app = app
195 request.cls.app = app
194 return app
196 return app
195
197
196
198
197 @pytest.fixture(scope='session')
199 @pytest.fixture(scope='session')
198 def app_settings(baseapp, ini_config):
200 def app_settings(baseapp, ini_config):
199 """
201 """
200 Settings dictionary used to create the app.
202 Settings dictionary used to create the app.
201
203
202 Parses the ini file and passes the result through the sanitize and apply
204 Parses the ini file and passes the result through the sanitize and apply
203 defaults mechanism in `rhodecode.config.middleware`.
205 defaults mechanism in `rhodecode.config.middleware`.
204 """
206 """
205 return baseapp.config.get_settings()
207 return baseapp.config.get_settings()
206
208
207
209
208 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
209 def db_connection(ini_settings):
211 def db_connection(ini_settings):
210 # Initialize the database connection.
212 # Initialize the database connection.
211 config_utils.initialize_database(ini_settings)
213 config_utils.initialize_database(ini_settings)
212
214
213
215
214 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
215
217
216
218
217 def _autologin_user(app, *args):
219 def _autologin_user(app, *args):
218 session = login_user_session(app, *args)
220 session = login_user_session(app, *args)
219 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
220 return LoginData(csrf_token, session['rhodecode_user'])
222 return LoginData(csrf_token, session['rhodecode_user'])
221
223
222
224
223 @pytest.fixture()
225 @pytest.fixture()
224 def autologin_user(app):
226 def autologin_user(app):
225 """
227 """
226 Utility fixture which makes sure that the admin user is logged in
228 Utility fixture which makes sure that the admin user is logged in
227 """
229 """
228 return _autologin_user(app)
230 return _autologin_user(app)
229
231
230
232
231 @pytest.fixture()
233 @pytest.fixture()
232 def autologin_regular_user(app):
234 def autologin_regular_user(app):
233 """
235 """
234 Utility fixture which makes sure that the regular user is logged in
236 Utility fixture which makes sure that the regular user is logged in
235 """
237 """
236 return _autologin_user(
238 return _autologin_user(
237 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
238
240
239
241
240 @pytest.fixture(scope='function')
242 @pytest.fixture(scope='function')
241 def csrf_token(request, autologin_user):
243 def csrf_token(request, autologin_user):
242 return autologin_user.csrf_token
244 return autologin_user.csrf_token
243
245
244
246
245 @pytest.fixture(scope='function')
247 @pytest.fixture(scope='function')
246 def xhr_header(request):
248 def xhr_header(request):
247 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
248
250
249
251
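# A hedged sketch of a typical authenticated AJAX POST combining the
# `csrf_token` and `xhr_header` fixtures defined above. The endpoint is a
# hypothetical placeholder, so any response status is accepted.
def test_ajax_post_sketch(app, csrf_token, xhr_header):
    app.post(
        '/hypothetical/endpoint',
        params={'csrf_token': csrf_token},
        extra_environ=xhr_header,
        status='*')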
250 @pytest.fixture()
252 @pytest.fixture()
251 def real_crypto_backend(monkeypatch):
253 def real_crypto_backend(monkeypatch):
252 """
254 """
253 Switch the production crypto backend on for this test.
255 Switch the production crypto backend on for this test.
254
256
255 During the test run the crypto backend is replaced with a faster
257 During the test run the crypto backend is replaced with a faster
256 implementation based on the MD5 algorithm.
258 implementation based on the MD5 algorithm.
257 """
259 """
258 monkeypatch.setattr(rhodecode, 'is_test', False)
260 monkeypatch.setattr(rhodecode, 'is_test', False)
259
261
260
262
261 @pytest.fixture(scope='class')
263 @pytest.fixture(scope='class')
262 def index_location(request, baseapp):
264 def index_location(request, baseapp):
263 index_location = baseapp.config.get_settings()['search.location']
265 index_location = baseapp.config.get_settings()['search.location']
264 if request.cls:
266 if request.cls:
265 request.cls.index_location = index_location
267 request.cls.index_location = index_location
266 return index_location
268 return index_location
267
269
268
270
269 @pytest.fixture(scope='session', autouse=True)
271 @pytest.fixture(scope='session', autouse=True)
270 def tests_tmp_path(request):
272 def tests_tmp_path(request):
271 """
273 """
272 Create temporary directory to be used during the test session.
274 Create temporary directory to be used during the test session.
273 """
275 """
274 if not os.path.exists(TESTS_TMP_PATH):
276 if not os.path.exists(TESTS_TMP_PATH):
275 os.makedirs(TESTS_TMP_PATH)
277 os.makedirs(TESTS_TMP_PATH)
276
278
277 if not request.config.getoption('--keep-tmp-path'):
279 if not request.config.getoption('--keep-tmp-path'):
278 @request.addfinalizer
280 @request.addfinalizer
279 def remove_tmp_path():
281 def remove_tmp_path():
280 shutil.rmtree(TESTS_TMP_PATH)
282 shutil.rmtree(TESTS_TMP_PATH)
281
283
282 return TESTS_TMP_PATH
284 return TESTS_TMP_PATH
283
285
284
286
285 @pytest.fixture()
287 @pytest.fixture()
286 def test_repo_group(request):
288 def test_repo_group(request):
287 """
289 """
288 Create a temporary repository group, and destroy it after
290 Create a temporary repository group, and destroy it after
289 usage automatically
291 usage automatically
290 """
292 """
291 fixture = Fixture()
293 fixture = Fixture()
292 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
293 repo_group = fixture.create_repo_group(repogroupid)
295 repo_group = fixture.create_repo_group(repogroupid)
294
296
295 def _cleanup():
297 def _cleanup():
296 fixture.destroy_repo_group(repogroupid)
298 fixture.destroy_repo_group(repogroupid)
297
299
298 request.addfinalizer(_cleanup)
300 request.addfinalizer(_cleanup)
299 return repo_group
301 return repo_group
300
302
301
303
302 @pytest.fixture()
304 @pytest.fixture()
303 def test_user_group(request):
305 def test_user_group(request):
304 """
306 """
305 Create a temporary user group, and destroy it after
307 Create a temporary user group, and destroy it after
306 usage automatically
308 usage automatically
307 """
309 """
308 fixture = Fixture()
310 fixture = Fixture()
309 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
310 user_group = fixture.create_user_group(usergroupid)
312 user_group = fixture.create_user_group(usergroupid)
311
313
312 def _cleanup():
314 def _cleanup():
313 fixture.destroy_user_group(user_group)
315 fixture.destroy_user_group(user_group)
314
316
315 request.addfinalizer(_cleanup)
317 request.addfinalizer(_cleanup)
316 return user_group
318 return user_group
317
319
318
320
319 @pytest.fixture(scope='session')
321 @pytest.fixture(scope='session')
320 def test_repo(request):
322 def test_repo(request):
321 container = TestRepoContainer()
323 container = TestRepoContainer()
322 request.addfinalizer(container._cleanup)
324 request.addfinalizer(container._cleanup)
323 return container
325 return container
324
326
325
327
326 class TestRepoContainer(object):
328 class TestRepoContainer(object):
327 """
329 """
328 Container for test repositories which are used read-only.
330 Container for test repositories which are used read-only.
329
331
330 Repositories will be created on demand and re-used during the lifetime
332 Repositories will be created on demand and re-used during the lifetime
331 of this object.
333 of this object.
332
334
333 Usage to get the svn test repository "minimal"::
335 Usage to get the svn test repository "minimal"::
334
336
335 test_repo = TestRepoContainer()
337 test_repo = TestRepoContainer()
336 repo = test_repo('minimal', 'svn')
338 repo = test_repo('minimal', 'svn')
337
339
338 """
340 """
339
341
340 dump_extractors = {
342 dump_extractors = {
341 'git': utils.extract_git_repo_from_dump,
343 'git': utils.extract_git_repo_from_dump,
342 'hg': utils.extract_hg_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
343 'svn': utils.extract_svn_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
344 }
346 }
345
347
346 def __init__(self):
348 def __init__(self):
347 self._cleanup_repos = []
349 self._cleanup_repos = []
348 self._fixture = Fixture()
350 self._fixture = Fixture()
349 self._repos = {}
351 self._repos = {}
350
352
351 def __call__(self, dump_name, backend_alias, config=None):
353 def __call__(self, dump_name, backend_alias, config=None):
352 key = (dump_name, backend_alias)
354 key = (dump_name, backend_alias)
353 if key not in self._repos:
355 if key not in self._repos:
354 repo = self._create_repo(dump_name, backend_alias, config)
356 repo = self._create_repo(dump_name, backend_alias, config)
355 self._repos[key] = repo.repo_id
357 self._repos[key] = repo.repo_id
356 return Repository.get(self._repos[key])
358 return Repository.get(self._repos[key])
357
359
358 def _create_repo(self, dump_name, backend_alias, config):
360 def _create_repo(self, dump_name, backend_alias, config):
359 repo_name = '%s-%s' % (backend_alias, dump_name)
361 repo_name = '%s-%s' % (backend_alias, dump_name)
360 backend = get_backend(backend_alias)
362 backend = get_backend(backend_alias)
361 dump_extractor = self.dump_extractors[backend_alias]
363 dump_extractor = self.dump_extractors[backend_alias]
362 repo_path = dump_extractor(dump_name, repo_name)
364 repo_path = dump_extractor(dump_name, repo_name)
363
365
364 vcs_repo = backend(repo_path, config=config)
366 vcs_repo = backend(repo_path, config=config)
365 repo2db_mapper({repo_name: vcs_repo})
367 repo2db_mapper({repo_name: vcs_repo})
366
368
367 repo = RepoModel().get_by_repo_name(repo_name)
369 repo = RepoModel().get_by_repo_name(repo_name)
368 self._cleanup_repos.append(repo_name)
370 self._cleanup_repos.append(repo_name)
369 return repo
371 return repo
370
372
371 def _cleanup(self):
373 def _cleanup(self):
372 for repo_name in reversed(self._cleanup_repos):
374 for repo_name in reversed(self._cleanup_repos):
373 self._fixture.destroy_repo(repo_name)
375 self._fixture.destroy_repo(repo_name)
374
376
375
377
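# A hedged sketch of the container above: the session-scoped `test_repo`
# fixture hands it out, and lookups for the same (dump_name, alias) pair are
# memoized, mirroring the 'minimal'/'svn' example from the class docstring.
def test_repo_container_memoizes_sketch(test_repo):
    first = test_repo('minimal', 'svn')
    second = test_repo('minimal', 'svn')
    assert first.repo_id == second.repo_id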
376 def backend_base(request, backend_alias, baseapp, test_repo):
378 def backend_base(request, backend_alias, baseapp, test_repo):
377 if backend_alias not in request.config.getoption('--backends'):
379 if backend_alias not in request.config.getoption('--backends'):
378 pytest.skip("Backend %s not selected." % (backend_alias, ))
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
379
381
380 utils.check_xfail_backends(request.node, backend_alias)
382 utils.check_xfail_backends(request.node, backend_alias)
381 utils.check_skip_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
382
384
383 repo_name = 'vcs_test_%s' % (backend_alias, )
385 repo_name = 'vcs_test_%s' % (backend_alias, )
384 backend = Backend(
386 backend = Backend(
385 alias=backend_alias,
387 alias=backend_alias,
386 repo_name=repo_name,
388 repo_name=repo_name,
387 test_name=request.node.name,
389 test_name=request.node.name,
388 test_repo_container=test_repo)
390 test_repo_container=test_repo)
389 request.addfinalizer(backend.cleanup)
391 request.addfinalizer(backend.cleanup)
390 return backend
392 return backend
391
393
392
394
393 @pytest.fixture()
395 @pytest.fixture()
394 def backend(request, backend_alias, baseapp, test_repo):
396 def backend(request, backend_alias, baseapp, test_repo):
395 """
397 """
396 Parametrized fixture which represents a single backend implementation.
398 Parametrized fixture which represents a single backend implementation.
397
399
398 It respects the option `--backends` to focus the test run on specific
400 It respects the option `--backends` to focus the test run on specific
399 backend implementations.
401 backend implementations.
400
402
401 It also supports `pytest.mark.xfail_backends` to mark tests as failing
403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
402 for specific backends. This is intended as a utility for incremental
404 for specific backends. This is intended as a utility for incremental
403 development of a new backend implementation.
405 development of a new backend implementation.
404 """
406 """
405 return backend_base(request, backend_alias, baseapp, test_repo)
407 return backend_base(request, backend_alias, baseapp, test_repo)
406
408
407
409
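# A hedged sketch of the parametrization described above: `backend` runs the
# test once per alias enabled via --backends, and backend_base() names the
# repository 'vcs_test_<alias>'.
def test_runs_per_backend_sketch(backend):
    assert backend.alias in ('git', 'hg', 'svn')
    assert backend.repo_name == 'vcs_test_%s' % backend.alias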
408 @pytest.fixture()
410 @pytest.fixture()
409 def backend_git(request, baseapp, test_repo):
411 def backend_git(request, baseapp, test_repo):
410 return backend_base(request, 'git', baseapp, test_repo)
412 return backend_base(request, 'git', baseapp, test_repo)
411
413
412
414
413 @pytest.fixture()
415 @pytest.fixture()
414 def backend_hg(request, baseapp, test_repo):
416 def backend_hg(request, baseapp, test_repo):
415 return backend_base(request, 'hg', baseapp, test_repo)
417 return backend_base(request, 'hg', baseapp, test_repo)
416
418
417
419
418 @pytest.fixture()
420 @pytest.fixture()
419 def backend_svn(request, baseapp, test_repo):
421 def backend_svn(request, baseapp, test_repo):
420 return backend_base(request, 'svn', baseapp, test_repo)
422 return backend_base(request, 'svn', baseapp, test_repo)
421
423
422
424
423 @pytest.fixture()
425 @pytest.fixture()
424 def backend_random(backend_git):
426 def backend_random(backend_git):
425 """
427 """
426 Use this to express that your tests need "a backend".
428 Use this to express that your tests need "a backend".
427
429
428 A few of our tests need a backend, so that we can run the code. This
430 A few of our tests need a backend, so that we can run the code. This
429 fixture is intended to be used for such cases. It will pick one of the
431 fixture is intended to be used for such cases. It will pick one of the
430 backends and run the tests.
432 backends and run the tests.
431
433
432 The fixture `backend` would run the test multiple times for each
434 The fixture `backend` would run the test multiple times for each
433 available backend, which is a pure waste of time if the test is
435 available backend, which is a pure waste of time if the test is
434 independent of the backend type.
436 independent of the backend type.
435 """
437 """
436 # TODO: johbo: Change this to pick a random backend
438 # TODO: johbo: Change this to pick a random backend
437 return backend_git
439 return backend_git
438
440
439
441
440 @pytest.fixture()
442 @pytest.fixture()
441 def backend_stub(backend_git):
443 def backend_stub(backend_git):
442 """
444 """
443 Use this to express that your tests need a backend stub
445 Use this to express that your tests need a backend stub
444
446
445 TODO: mikhail: Implement a real stub logic instead of returning
447 TODO: mikhail: Implement a real stub logic instead of returning
446 a git backend
448 a git backend
447 """
449 """
448 return backend_git
450 return backend_git
449
451
450
452
451 @pytest.fixture()
453 @pytest.fixture()
452 def repo_stub(backend_stub):
454 def repo_stub(backend_stub):
453 """
455 """
454 Use this to express that your tests need a repository stub
456 Use this to express that your tests need a repository stub
455 """
457 """
456 return backend_stub.create_repo()
458 return backend_stub.create_repo()
457
459
458
460
459 class Backend(object):
461 class Backend(object):
460 """
462 """
461 Represents the test configuration for one supported backend
463 Represents the test configuration for one supported backend
462
464
463 Provides easy access to different test repositories based on
465 Provides easy access to different test repositories based on
464 `__getitem__`. Such repositories will only be created once per test
466 `__getitem__`. Such repositories will only be created once per test
465 session.
467 session.
466 """
468 """
467
469
468 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
469 _master_repo = None
471 _master_repo = None
470 _master_repo_path = ''
472 _master_repo_path = ''
471 _commit_ids = {}
473 _commit_ids = {}
472
474
473 def __init__(self, alias, repo_name, test_name, test_repo_container):
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
474 self.alias = alias
476 self.alias = alias
475 self.repo_name = repo_name
477 self.repo_name = repo_name
476 self._cleanup_repos = []
478 self._cleanup_repos = []
477 self._test_name = test_name
479 self._test_name = test_name
478 self._test_repo_container = test_repo_container
480 self._test_repo_container = test_repo_container
479 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
480 # Fixture will survive in the end.
482 # Fixture will survive in the end.
481 self._fixture = Fixture()
483 self._fixture = Fixture()
482
484
483 def __getitem__(self, key):
485 def __getitem__(self, key):
484 return self._test_repo_container(key, self.alias)
486 return self._test_repo_container(key, self.alias)
485
487
486 def create_test_repo(self, key, config=None):
488 def create_test_repo(self, key, config=None):
487 return self._test_repo_container(key, self.alias, config)
489 return self._test_repo_container(key, self.alias, config)
488
490
489 @property
491 @property
490 def repo(self):
492 def repo(self):
491 """
493 """
492 Returns the "current" repository. This is the vcs_test repo or the
494 Returns the "current" repository. This is the vcs_test repo or the
493 last repo which has been created with `create_repo`.
495 last repo which has been created with `create_repo`.
494 """
496 """
495 from rhodecode.model.db import Repository
497 from rhodecode.model.db import Repository
496 return Repository.get_by_repo_name(self.repo_name)
498 return Repository.get_by_repo_name(self.repo_name)
497
499
498 @property
500 @property
499 def default_branch_name(self):
501 def default_branch_name(self):
500 VcsRepository = get_backend(self.alias)
502 VcsRepository = get_backend(self.alias)
501 return VcsRepository.DEFAULT_BRANCH_NAME
503 return VcsRepository.DEFAULT_BRANCH_NAME
502
504
503 @property
505 @property
504 def default_head_id(self):
506 def default_head_id(self):
505 """
507 """
506 Returns the default head id of the underlying backend.
508 Returns the default head id of the underlying backend.
507
509
508 This will be the default branch name if the backend has a default
510 This will be the default branch name if the backend has a default
509 branch. Otherwise it will point to a valid head which can serve as
511 branch. Otherwise it will point to a valid head which can serve as
510 the base for creating a new commit on top of it.
512 the base for creating a new commit on top of it.
511 """
513 """
512 vcsrepo = self.repo.scm_instance()
514 vcsrepo = self.repo.scm_instance()
513 head_id = (
515 head_id = (
514 vcsrepo.DEFAULT_BRANCH_NAME or
516 vcsrepo.DEFAULT_BRANCH_NAME or
515 vcsrepo.commit_ids[-1])
517 vcsrepo.commit_ids[-1])
516 return head_id
518 return head_id
517
519
518 @property
520 @property
519 def commit_ids(self):
521 def commit_ids(self):
520 """
522 """
521 Returns the list of commits for the last created repository
523 Returns the list of commits for the last created repository
522 """
524 """
523 return self._commit_ids
525 return self._commit_ids
524
526
525 def create_master_repo(self, commits):
527 def create_master_repo(self, commits):
526 """
528 """
527 Create a repository and remember it as a template.
529 Create a repository and remember it as a template.
528
530
529 This makes it easy to create derived repositories to construct
531 This makes it easy to create derived repositories to construct
530 more complex scenarios for diff, compare and pull requests.
532 more complex scenarios for diff, compare and pull requests.
531
533
532 Returns a commit map which maps from commit message to raw_id.
534 Returns a commit map which maps from commit message to raw_id.
533 """
535 """
534 self._master_repo = self.create_repo(commits=commits)
536 self._master_repo = self.create_repo(commits=commits)
535 self._master_repo_path = self._master_repo.repo_full_path
537 self._master_repo_path = self._master_repo.repo_full_path
536
538
537 return self._commit_ids
539 return self._commit_ids
538
540
539 def create_repo(
541 def create_repo(
540 self, commits=None, number_of_commits=0, heads=None,
542 self, commits=None, number_of_commits=0, heads=None,
541 name_suffix=u'', bare=False, **kwargs):
543 name_suffix=u'', bare=False, **kwargs):
542 """
544 """
543 Create a repository and record it for later cleanup.
545 Create a repository and record it for later cleanup.
544
546
545 :param commits: Optional. A sequence of dict instances.
547 :param commits: Optional. A sequence of dict instances.
546 Will add a commit per entry to the new repository.
548 Will add a commit per entry to the new repository.
547 :param number_of_commits: Optional. If set to a number, this number of
549 :param number_of_commits: Optional. If set to a number, this number of
548 commits will be added to the new repository.
550 commits will be added to the new repository.
549 :param heads: Optional. Can be set to a sequence of commit
551 :param heads: Optional. Can be set to a sequence of commit
550 names which shall be pulled in from the master repository.
552 names which shall be pulled in from the master repository.
551 :param name_suffix: adds special suffix to generated repo name
553 :param name_suffix: adds special suffix to generated repo name
552 :param bare: set a repo as bare (no checkout)
554 :param bare: set a repo as bare (no checkout)
553 """
555 """
554 self.repo_name = self._next_repo_name() + name_suffix
556 self.repo_name = self._next_repo_name() + name_suffix
555 repo = self._fixture.create_repo(
557 repo = self._fixture.create_repo(
556 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
558 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
557 self._cleanup_repos.append(repo.repo_name)
559 self._cleanup_repos.append(repo.repo_name)
558
560
559 commits = commits or [
561 commits = commits or [
560 {'message': 'Commit %s of %s' % (x, self.repo_name)}
562 {'message': 'Commit %s of %s' % (x, self.repo_name)}
561 for x in range(number_of_commits)]
563 for x in range(number_of_commits)]
562 vcs_repo = repo.scm_instance()
564 vcs_repo = repo.scm_instance()
563 vcs_repo.count()
565 vcs_repo.count()
564 self._add_commits_to_repo(vcs_repo, commits)
566 self._add_commits_to_repo(vcs_repo, commits)
565 if heads:
567 if heads:
566 self.pull_heads(repo, heads)
568 self.pull_heads(repo, heads)
567
569
568 return repo
570 return repo
569
571
570 def pull_heads(self, repo, heads):
572 def pull_heads(self, repo, heads):
571 """
573 """
572 Make sure that repo contains all commits mentioned in `heads`
574 Make sure that repo contains all commits mentioned in `heads`
573 """
575 """
574 vcsrepo = repo.scm_instance()
576 vcsrepo = repo.scm_instance()
575 vcsrepo.config.clear_section('hooks')
577 vcsrepo.config.clear_section('hooks')
576 commit_ids = [self._commit_ids[h] for h in heads]
578 commit_ids = [self._commit_ids[h] for h in heads]
577 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
579 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
578
580
579 def create_fork(self):
581 def create_fork(self):
580 repo_to_fork = self.repo_name
582 repo_to_fork = self.repo_name
581 self.repo_name = self._next_repo_name()
583 self.repo_name = self._next_repo_name()
582 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
584 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
583 self._cleanup_repos.append(self.repo_name)
585 self._cleanup_repos.append(self.repo_name)
584 return repo
586 return repo
585
587
586 def new_repo_name(self, suffix=u''):
588 def new_repo_name(self, suffix=u''):
587 self.repo_name = self._next_repo_name() + suffix
589 self.repo_name = self._next_repo_name() + suffix
588 self._cleanup_repos.append(self.repo_name)
590 self._cleanup_repos.append(self.repo_name)
589 return self.repo_name
591 return self.repo_name
590
592
591 def _next_repo_name(self):
593 def _next_repo_name(self):
592 return u"%s_%s" % (
594 return u"%s_%s" % (
593 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
595 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
594
596
595 def ensure_file(self, filename, content='Test content\n'):
597 def ensure_file(self, filename, content='Test content\n'):
596 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
598 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
597 commits = [
599 commits = [
598 {'added': [
600 {'added': [
599 FileNode(filename, content=content),
601 FileNode(filename, content=content),
600 ]},
602 ]},
601 ]
603 ]
602 self._add_commits_to_repo(self.repo.scm_instance(), commits)
604 self._add_commits_to_repo(self.repo.scm_instance(), commits)
603
605
604 def enable_downloads(self):
606 def enable_downloads(self):
605 repo = self.repo
607 repo = self.repo
606 repo.enable_downloads = True
608 repo.enable_downloads = True
607 Session().add(repo)
609 Session().add(repo)
608 Session().commit()
610 Session().commit()
609
611
610 def cleanup(self):
612 def cleanup(self):
611 for repo_name in reversed(self._cleanup_repos):
613 for repo_name in reversed(self._cleanup_repos):
612 self._fixture.destroy_repo(repo_name)
614 self._fixture.destroy_repo(repo_name)
613
615
614 def _add_commits_to_repo(self, repo, commits):
616 def _add_commits_to_repo(self, repo, commits):
615 commit_ids = _add_commits_to_repo(repo, commits)
617 commit_ids = _add_commits_to_repo(repo, commits)
616 if not commit_ids:
618 if not commit_ids:
617 return
619 return
618 self._commit_ids = commit_ids
620 self._commit_ids = commit_ids
619
621
620 # Creating refs for Git to allow fetching them from remote repository
622 # Creating refs for Git to allow fetching them from remote repository
621 if self.alias == 'git':
623 if self.alias == 'git':
622 refs = {}
624 refs = {}
623 for message in self._commit_ids:
625 for message in self._commit_ids:
624 # TODO: mikhail: do more special chars replacements
626 # TODO: mikhail: do more special chars replacements
625 ref_name = 'refs/test-refs/{}'.format(
627 ref_name = 'refs/test-refs/{}'.format(
626 message.replace(' ', ''))
628 message.replace(' ', ''))
627 refs[ref_name] = self._commit_ids[message]
629 refs[ref_name] = self._commit_ids[message]
628 self._create_refs(repo, refs)
630 self._create_refs(repo, refs)
629
631
630 def _create_refs(self, repo, refs):
632 def _create_refs(self, repo, refs):
631 for ref_name in refs:
633 for ref_name in refs:
632 repo.set_refs(ref_name, refs[ref_name])
634 repo.set_refs(ref_name, refs[ref_name])
633
635
634
636
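# A hedged usage sketch for the Backend helper above: create a throwaway
# repository with two generated commits and inspect the recorded commit map.
def test_backend_create_repo_sketch(backend):
    repo = backend.create_repo(number_of_commits=2)
    assert repo.repo_name == backend.repo_name
    assert len(backend.commit_ids) == 2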
635 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
637 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
636 if backend_alias not in request.config.getoption('--backends'):
638 if backend_alias not in request.config.getoption('--backends'):
637 pytest.skip("Backend %s not selected." % (backend_alias, ))
639 pytest.skip("Backend %s not selected." % (backend_alias, ))
638
640
639 utils.check_xfail_backends(request.node, backend_alias)
641 utils.check_xfail_backends(request.node, backend_alias)
640 utils.check_skip_backends(request.node, backend_alias)
642 utils.check_skip_backends(request.node, backend_alias)
641
643
642 repo_name = 'vcs_test_%s' % (backend_alias, )
644 repo_name = 'vcs_test_%s' % (backend_alias, )
643 repo_path = os.path.join(tests_tmp_path, repo_name)
645 repo_path = os.path.join(tests_tmp_path, repo_name)
644 backend = VcsBackend(
646 backend = VcsBackend(
645 alias=backend_alias,
647 alias=backend_alias,
646 repo_path=repo_path,
648 repo_path=repo_path,
647 test_name=request.node.name,
649 test_name=request.node.name,
648 test_repo_container=test_repo)
650 test_repo_container=test_repo)
649 request.addfinalizer(backend.cleanup)
651 request.addfinalizer(backend.cleanup)
650 return backend
652 return backend
651
653
652
654
653 @pytest.fixture()
655 @pytest.fixture()
654 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
656 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
655 """
657 """
656 Parametrized fixture which represents a single vcs backend implementation.
658 Parametrized fixture which represents a single vcs backend implementation.
657
659
658 See the fixture `backend` for more details. This one implements the same
660 See the fixture `backend` for more details. This one implements the same
659 concept, but on vcs level. So it does not provide model instances etc.
661 concept, but on vcs level. So it does not provide model instances etc.
660
662
661 Parameters are generated dynamically, see :func:`pytest_generate_tests`
663 Parameters are generated dynamically, see :func:`pytest_generate_tests`
662 for how this works.
664 for how this works.
663 """
665 """
664 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
666 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
665
667
666
668
667 @pytest.fixture()
669 @pytest.fixture()
668 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
670 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
669 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
671 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
670
672
671
673
672 @pytest.fixture()
674 @pytest.fixture()
673 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
675 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
674 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
676 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
675
677
676
678
677 @pytest.fixture()
679 @pytest.fixture()
678 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
680 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
679 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
681 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
680
682
681
683
682 @pytest.fixture()
684 @pytest.fixture()
683 def vcsbackend_stub(vcsbackend_git):
685 def vcsbackend_stub(vcsbackend_git):
684 """
686 """
685 Use this to express that your test just needs a stub of a vcsbackend.
687 Use this to express that your test just needs a stub of a vcsbackend.
686
688
687 Plan is to eventually implement an in-memory stub to speed tests up.
689 Plan is to eventually implement an in-memory stub to speed tests up.
688 """
690 """
689 return vcsbackend_git
691 return vcsbackend_git
690
692
691
693
692 class VcsBackend(object):
694 class VcsBackend(object):
693 """
695 """
694 Represents the test configuration for one supported vcs backend.
696 Represents the test configuration for one supported vcs backend.
695 """
697 """
696
698
697 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
699 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
698
700
699 def __init__(self, alias, repo_path, test_name, test_repo_container):
701 def __init__(self, alias, repo_path, test_name, test_repo_container):
700 self.alias = alias
702 self.alias = alias
701 self._repo_path = repo_path
703 self._repo_path = repo_path
702 self._cleanup_repos = []
704 self._cleanup_repos = []
703 self._test_name = test_name
705 self._test_name = test_name
704 self._test_repo_container = test_repo_container
706 self._test_repo_container = test_repo_container
705
707
706 def __getitem__(self, key):
708 def __getitem__(self, key):
707 return self._test_repo_container(key, self.alias).scm_instance()
709 return self._test_repo_container(key, self.alias).scm_instance()
708
710
709 @property
711 @property
710 def repo(self):
712 def repo(self):
711 """
713 """
712 Returns the "current" repository. This is the vcs_test repo or the last
714 Returns the "current" repository. This is the vcs_test repo or the last
713 repo which has been created.
715 repo which has been created.
714 """
716 """
715 Repository = get_backend(self.alias)
717 Repository = get_backend(self.alias)
716 return Repository(self._repo_path)
718 return Repository(self._repo_path)
717
719
718 @property
720 @property
719 def backend(self):
721 def backend(self):
720 """
722 """
721 Returns the backend implementation class.
723 Returns the backend implementation class.
722 """
724 """
723 return get_backend(self.alias)
725 return get_backend(self.alias)
724
726
725 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
727 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
726 bare=False):
728 bare=False):
727 repo_name = self._next_repo_name()
729 repo_name = self._next_repo_name()
728 self._repo_path = get_new_dir(repo_name)
730 self._repo_path = get_new_dir(repo_name)
729 repo_class = get_backend(self.alias)
731 repo_class = get_backend(self.alias)
730 src_url = None
732 src_url = None
731 if _clone_repo:
733 if _clone_repo:
732 src_url = _clone_repo.path
734 src_url = _clone_repo.path
733 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
735 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
734 self._cleanup_repos.append(repo)
736 self._cleanup_repos.append(repo)
735
737
736 commits = commits or [
738 commits = commits or [
737 {'message': 'Commit %s of %s' % (x, repo_name)}
739 {'message': 'Commit %s of %s' % (x, repo_name)}
738 for x in range(number_of_commits)]
740 for x in range(number_of_commits)]
739 _add_commits_to_repo(repo, commits)
741 _add_commits_to_repo(repo, commits)
740 return repo
742 return repo
741
743
742 def clone_repo(self, repo):
744 def clone_repo(self, repo):
743 return self.create_repo(_clone_repo=repo)
745 return self.create_repo(_clone_repo=repo)
744
746
745 def cleanup(self):
747 def cleanup(self):
746 for repo in self._cleanup_repos:
748 for repo in self._cleanup_repos:
747 shutil.rmtree(repo.path)
749 shutil.rmtree(repo.path)
748
750
749 def new_repo_path(self):
751 def new_repo_path(self):
750 repo_name = self._next_repo_name()
752 repo_name = self._next_repo_name()
751 self._repo_path = get_new_dir(repo_name)
753 self._repo_path = get_new_dir(repo_name)
752 return self._repo_path
754 return self._repo_path
753
755
754 def _next_repo_name(self):
756 def _next_repo_name(self):
755 return "%s_%s" % (
757 return "%s_%s" % (
756 self.invalid_repo_name.sub('_', self._test_name),
758 self.invalid_repo_name.sub('_', self._test_name),
757 len(self._cleanup_repos))
759 len(self._cleanup_repos))
758
760
759 def add_file(self, repo, filename, content='Test content\n'):
761 def add_file(self, repo, filename, content='Test content\n'):
760 imc = repo.in_memory_commit
762 imc = repo.in_memory_commit
761 imc.add(FileNode(filename, content=content))
763 imc.add(FileNode(filename, content=content))
762 imc.commit(
764 imc.commit(
763 message=u'Automatic commit from vcsbackend fixture',
765 message=u'Automatic commit from vcsbackend fixture',
764 author=u'Automatic <automatic@rhodecode.com>')
766 author=u'Automatic <automatic@rhodecode.com>')
765
767
766 def ensure_file(self, filename, content='Test content\n'):
768 def ensure_file(self, filename, content='Test content\n'):
767 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
769 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
768 self.add_file(self.repo, filename, content)
770 self.add_file(self.repo, filename, content)
769
771
770
772
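# A hedged sketch for the VcsBackend helper above: build a vcs-level repository
# with one generated commit, then append a file via the in-memory commit API.
def test_vcsbackend_sketch(vcsbackend):
    repo = vcsbackend.create_repo(number_of_commits=1)
    assert os.path.exists(repo.path)
    vcsbackend.add_file(repo, 'README.rst', content='content from the sketch\n')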
771 def _add_commits_to_repo(vcs_repo, commits):
773 def _add_commits_to_repo(vcs_repo, commits):
772 commit_ids = {}
774 commit_ids = {}
773 if not commits:
775 if not commits:
774 return commit_ids
776 return commit_ids
775
777
776 imc = vcs_repo.in_memory_commit
778 imc = vcs_repo.in_memory_commit
777 commit = None
779 commit = None
778
780
779 for idx, commit in enumerate(commits):
781 for idx, commit in enumerate(commits):
780 message = str(commit.get('message', 'Commit %s' % idx))
782 message = str(commit.get('message', 'Commit %s' % idx))
781
783
782 for node in commit.get('added', []):
784 for node in commit.get('added', []):
783 imc.add(FileNode(node.path, content=node.content))
785 imc.add(FileNode(node.path, content=node.content))
784 for node in commit.get('changed', []):
786 for node in commit.get('changed', []):
785 imc.change(FileNode(node.path, content=node.content))
787 imc.change(FileNode(node.path, content=node.content))
786 for node in commit.get('removed', []):
788 for node in commit.get('removed', []):
787 imc.remove(FileNode(node.path))
789 imc.remove(FileNode(node.path))
788
790
789 parents = [
791 parents = [
790 vcs_repo.get_commit(commit_id=commit_ids[p])
792 vcs_repo.get_commit(commit_id=commit_ids[p])
791 for p in commit.get('parents', [])]
793 for p in commit.get('parents', [])]
792
794
793 operations = ('added', 'changed', 'removed')
795 operations = ('added', 'changed', 'removed')
794 if not any((commit.get(o) for o in operations)):
796 if not any((commit.get(o) for o in operations)):
795 imc.add(FileNode('file_%s' % idx, content=message))
797 imc.add(FileNode('file_%s' % idx, content=message))
796
798
797 commit = imc.commit(
799 commit = imc.commit(
798 message=message,
800 message=message,
799 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
801 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
800 date=commit.get('date'),
802 date=commit.get('date'),
801 branch=commit.get('branch'),
803 branch=commit.get('branch'),
802 parents=parents)
804 parents=parents)
803
805
804 commit_ids[commit.message] = commit.raw_id
806 commit_ids[commit.message] = commit.raw_id
805
807
806 return commit_ids
808 return commit_ids
807
809
808
810
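# An illustrative (editor-added) example of the commit-spec dicts consumed by
# _add_commits_to_repo() above; only keys the function actually reads are used,
# and 'parents' entries refer to earlier commit messages.
EXAMPLE_COMMIT_SPECS = [
    {'message': 'add docs',
     'added': [FileNode('docs/index.rst', content='index\n')]},
    {'message': 'tweak docs',
     'changed': [FileNode('docs/index.rst', content='index v2\n')],
     'parents': ['add docs']},
]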
809 @pytest.fixture()
811 @pytest.fixture()
810 def reposerver(request):
812 def reposerver(request):
811 """
813 """
812 Allows serving a backend repository
814 Allows serving a backend repository
813 """
815 """
814
816
815 repo_server = RepoServer()
817 repo_server = RepoServer()
816 request.addfinalizer(repo_server.cleanup)
818 request.addfinalizer(repo_server.cleanup)
817 return repo_server
819 return repo_server
818
820
819
821
820 class RepoServer(object):
822 class RepoServer(object):
821 """
823 """
822 Utility to serve a local repository for the duration of a test case.
824 Utility to serve a local repository for the duration of a test case.
823
825
824 Supports only Subversion so far.
826 Supports only Subversion so far.
825 """
827 """
826
828
827 url = None
829 url = None
828
830
829 def __init__(self):
831 def __init__(self):
830 self._cleanup_servers = []
832 self._cleanup_servers = []
831
833
832 def serve(self, vcsrepo):
834 def serve(self, vcsrepo):
833 if vcsrepo.alias != 'svn':
835 if vcsrepo.alias != 'svn':
834 raise TypeError("Backend %s not supported" % vcsrepo.alias)
836 raise TypeError("Backend %s not supported" % vcsrepo.alias)
835
837
836 proc = subprocess.Popen(
838 proc = subprocess.Popen(
837 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
839 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
838 '--root', vcsrepo.path])
840 '--root', vcsrepo.path])
839 self._cleanup_servers.append(proc)
841 self._cleanup_servers.append(proc)
840 self.url = 'svn://localhost'
842 self.url = 'svn://localhost'
841
843
842 def cleanup(self):
844 def cleanup(self):
843 for proc in self._cleanup_servers:
845 for proc in self._cleanup_servers:
844 proc.terminate()
846 proc.terminate()
845
847
846
848
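# A hedged sketch combining `reposerver` with the svn vcs backend; it assumes
# `svnserve` is available on PATH, as RepoServer.serve() above requires.
def test_reposerver_sketch(reposerver, vcsbackend_svn):
    vcsrepo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcsrepo)
    assert reposerver.url == 'svn://localhost'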
847 @pytest.fixture()
849 @pytest.fixture()
848 def pr_util(backend, request, config_stub):
850 def pr_util(backend, request, config_stub):
849 """
851 """
850 Utility for tests of models and for functional tests around pull requests.
852 Utility for tests of models and for functional tests around pull requests.
851
853
852 It gives an instance of :class:`PRTestUtility` which provides various
854 It gives an instance of :class:`PRTestUtility` which provides various
853 utility methods around one pull request.
855 utility methods around one pull request.
854
856
855 This fixture uses `backend` and inherits its parameterization.
857 This fixture uses `backend` and inherits its parameterization.
856 """
858 """
857
859
858 util = PRTestUtility(backend)
860 util = PRTestUtility(backend)
859 request.addfinalizer(util.cleanup)
861 request.addfinalizer(util.cleanup)
860
862
861 return util
863 return util
862
864
863
865
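# A hedged sketch of the typical `pr_util` flow: create a pull request against
# generated repositories, then drive it through the helpers of PRTestUtility
# defined below.
def test_pr_util_flow_sketch(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.pull_request_id == pr_util.pull_request_id
    pr_util.approve()
    pr_util.close()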
864 class PRTestUtility(object):
866 class PRTestUtility(object):
865
867
866 pull_request = None
868 pull_request = None
867 pull_request_id = None
869 pull_request_id = None
868 mergeable_patcher = None
870 mergeable_patcher = None
869 mergeable_mock = None
871 mergeable_mock = None
870 notification_patcher = None
872 notification_patcher = None
871
873
872 def __init__(self, backend):
874 def __init__(self, backend):
873 self.backend = backend
875 self.backend = backend
874
876
875 def create_pull_request(
877 def create_pull_request(
876 self, commits=None, target_head=None, source_head=None,
878 self, commits=None, target_head=None, source_head=None,
877 revisions=None, approved=False, author=None, mergeable=False,
879 revisions=None, approved=False, author=None, mergeable=False,
878 enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
880 enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
879 title=u"Test", description=u"Description"):
881 title=u"Test", description=u"Description"):
880 self.set_mergeable(mergeable)
882 self.set_mergeable(mergeable)
881 if not enable_notifications:
883 if not enable_notifications:
882 # mock notification side effect
884 # mock notification side effect
883 self.notification_patcher = mock.patch(
885 self.notification_patcher = mock.patch(
884 'rhodecode.model.notification.NotificationModel.create')
886 'rhodecode.model.notification.NotificationModel.create')
885 self.notification_patcher.start()
887 self.notification_patcher.start()
886
888
887 if not self.pull_request:
889 if not self.pull_request:
888 if not commits:
890 if not commits:
889 commits = [
891 commits = [
890 {'message': 'c1'},
892 {'message': 'c1'},
891 {'message': 'c2'},
893 {'message': 'c2'},
892 {'message': 'c3'},
894 {'message': 'c3'},
893 ]
895 ]
894 target_head = 'c1'
896 target_head = 'c1'
895 source_head = 'c2'
897 source_head = 'c2'
896 revisions = ['c2']
898 revisions = ['c2']
897
899
898 self.commit_ids = self.backend.create_master_repo(commits)
900 self.commit_ids = self.backend.create_master_repo(commits)
899 self.target_repository = self.backend.create_repo(
901 self.target_repository = self.backend.create_repo(
900 heads=[target_head], name_suffix=name_suffix)
902 heads=[target_head], name_suffix=name_suffix)
901 self.source_repository = self.backend.create_repo(
903 self.source_repository = self.backend.create_repo(
902 heads=[source_head], name_suffix=name_suffix)
904 heads=[source_head], name_suffix=name_suffix)
903 self.author = author or UserModel().get_by_username(
905 self.author = author or UserModel().get_by_username(
904 TEST_USER_ADMIN_LOGIN)
906 TEST_USER_ADMIN_LOGIN)
905
907
906 model = PullRequestModel()
908 model = PullRequestModel()
907 self.create_parameters = {
909 self.create_parameters = {
908 'created_by': self.author,
910 'created_by': self.author,
909 'source_repo': self.source_repository.repo_name,
911 'source_repo': self.source_repository.repo_name,
910 'source_ref': self._default_branch_reference(source_head),
912 'source_ref': self._default_branch_reference(source_head),
911 'target_repo': self.target_repository.repo_name,
913 'target_repo': self.target_repository.repo_name,
912 'target_ref': self._default_branch_reference(target_head),
914 'target_ref': self._default_branch_reference(target_head),
913 'revisions': [self.commit_ids[r] for r in revisions],
915 'revisions': [self.commit_ids[r] for r in revisions],
914 'reviewers': reviewers or self._get_reviewers(),
916 'reviewers': reviewers or self._get_reviewers(),
915 'observers': observers or self._get_observers(),
917 'observers': observers or self._get_observers(),
916 'title': title,
918 'title': title,
917 'description': description,
919 'description': description,
918 }
920 }
919 self.pull_request = model.create(**self.create_parameters)
921 self.pull_request = model.create(**self.create_parameters)
920 assert model.get_versions(self.pull_request) == []
922 assert model.get_versions(self.pull_request) == []
921
923
922 self.pull_request_id = self.pull_request.pull_request_id
924 self.pull_request_id = self.pull_request.pull_request_id
923
925
924 if approved:
926 if approved:
925 self.approve()
927 self.approve()
926
928
927 Session().add(self.pull_request)
929 Session().add(self.pull_request)
928 Session().commit()
930 Session().commit()
929
931
930 return self.pull_request
932 return self.pull_request
931
933
932 def approve(self):
934 def approve(self):
933 self.create_status_votes(
935 self.create_status_votes(
934 ChangesetStatus.STATUS_APPROVED,
936 ChangesetStatus.STATUS_APPROVED,
935 *self.pull_request.reviewers)
937 *self.pull_request.reviewers)
936
938
937 def close(self):
939 def close(self):
938 PullRequestModel().close_pull_request(self.pull_request, self.author)
940 PullRequestModel().close_pull_request(self.pull_request, self.author)
939
941
940 def _default_branch_reference(self, commit_message):
942 def _default_branch_reference(self, commit_message):
941 reference = '%s:%s:%s' % (
943 reference = '%s:%s:%s' % (
942 'branch',
944 'branch',
943 self.backend.default_branch_name,
945 self.backend.default_branch_name,
944 self.commit_ids[commit_message])
946 self.commit_ids[commit_message])
945 return reference
947 return reference
946
948
947 def _get_reviewers(self):
949 def _get_reviewers(self):
948 role = PullRequestReviewers.ROLE_REVIEWER
950 role = PullRequestReviewers.ROLE_REVIEWER
949 return [
951 return [
950 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
952 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
951 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
953 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
952 ]
954 ]
953
955
954 def _get_observers(self):
956 def _get_observers(self):
955 return [
957 return [
956
958
957 ]
959 ]
958
960
959 def update_source_repository(self, head=None):
961 def update_source_repository(self, head=None):
960 heads = [head or 'c3']
962 heads = [head or 'c3']
961 self.backend.pull_heads(self.source_repository, heads=heads)
963 self.backend.pull_heads(self.source_repository, heads=heads)
962
964
963 def add_one_commit(self, head=None):
965 def add_one_commit(self, head=None):
964 self.update_source_repository(head=head)
966 self.update_source_repository(head=head)
965 old_commit_ids = set(self.pull_request.revisions)
967 old_commit_ids = set(self.pull_request.revisions)
966 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
968 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
967 commit_ids = set(self.pull_request.revisions)
969 commit_ids = set(self.pull_request.revisions)
968 new_commit_ids = commit_ids - old_commit_ids
970 new_commit_ids = commit_ids - old_commit_ids
969 assert len(new_commit_ids) == 1
971 assert len(new_commit_ids) == 1
970 return new_commit_ids.pop()
972 return new_commit_ids.pop()
971
973
972 def remove_one_commit(self):
974 def remove_one_commit(self):
973 assert len(self.pull_request.revisions) == 2
975 assert len(self.pull_request.revisions) == 2
974 source_vcs = self.source_repository.scm_instance()
976 source_vcs = self.source_repository.scm_instance()
975 removed_commit_id = source_vcs.commit_ids[-1]
977 removed_commit_id = source_vcs.commit_ids[-1]
976
978
977 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
979 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
978 # remove the if once that's sorted out.
980 # remove the if once that's sorted out.
979 if self.backend.alias == "git":
981 if self.backend.alias == "git":
980 kwargs = {'branch_name': self.backend.default_branch_name}
982 kwargs = {'branch_name': self.backend.default_branch_name}
981 else:
983 else:
982 kwargs = {}
984 kwargs = {}
983 source_vcs.strip(removed_commit_id, **kwargs)
985 source_vcs.strip(removed_commit_id, **kwargs)
984
986
985 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
987 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
986 assert len(self.pull_request.revisions) == 1
988 assert len(self.pull_request.revisions) == 1
987 return removed_commit_id
989 return removed_commit_id
988
990
989 def create_comment(self, linked_to=None):
991 def create_comment(self, linked_to=None):
990 comment = CommentsModel().create(
992 comment = CommentsModel().create(
991 text=u"Test comment",
993 text=u"Test comment",
992 repo=self.target_repository.repo_name,
994 repo=self.target_repository.repo_name,
993 user=self.author,
995 user=self.author,
994 pull_request=self.pull_request)
996 pull_request=self.pull_request)
995 assert comment.pull_request_version_id is None
997 assert comment.pull_request_version_id is None
996
998
997 if linked_to:
999 if linked_to:
998 PullRequestModel()._link_comments_to_version(linked_to)
1000 PullRequestModel()._link_comments_to_version(linked_to)
999
1001
1000 return comment
1002 return comment
1001
1003
1002 def create_inline_comment(
1004 def create_inline_comment(
1003 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1005 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1004 comment = CommentsModel().create(
1006 comment = CommentsModel().create(
1005 text=u"Test comment",
1007 text=u"Test comment",
1006 repo=self.target_repository.repo_name,
1008 repo=self.target_repository.repo_name,
1007 user=self.author,
1009 user=self.author,
1008 line_no=line_no,
1010 line_no=line_no,
1009 f_path=file_path,
1011 f_path=file_path,
1010 pull_request=self.pull_request)
1012 pull_request=self.pull_request)
1011 assert comment.pull_request_version_id is None
1013 assert comment.pull_request_version_id is None
1012
1014
1013 if linked_to:
1015 if linked_to:
1014 PullRequestModel()._link_comments_to_version(linked_to)
1016 PullRequestModel()._link_comments_to_version(linked_to)
1015
1017
1016 return comment
1018 return comment
1017
1019
1018 def create_version_of_pull_request(self):
1020 def create_version_of_pull_request(self):
1019 pull_request = self.create_pull_request()
1021 pull_request = self.create_pull_request()
1020 version = PullRequestModel()._create_version_from_snapshot(
1022 version = PullRequestModel()._create_version_from_snapshot(
1021 pull_request)
1023 pull_request)
1022 return version
1024 return version
1023
1025
1024 def create_status_votes(self, status, *reviewers):
1026 def create_status_votes(self, status, *reviewers):
1025 for reviewer in reviewers:
1027 for reviewer in reviewers:
1026 ChangesetStatusModel().set_status(
1028 ChangesetStatusModel().set_status(
1027 repo=self.pull_request.target_repo,
1029 repo=self.pull_request.target_repo,
1028 status=status,
1030 status=status,
1029 user=reviewer.user_id,
1031 user=reviewer.user_id,
1030 pull_request=self.pull_request)
1032 pull_request=self.pull_request)
1031
1033
1032 def set_mergeable(self, value):
1034 def set_mergeable(self, value):
1033 if not self.mergeable_patcher:
1035 if not self.mergeable_patcher:
1034 self.mergeable_patcher = mock.patch.object(
1036 self.mergeable_patcher = mock.patch.object(
1035 VcsSettingsModel, 'get_general_settings')
1037 VcsSettingsModel, 'get_general_settings')
1036 self.mergeable_mock = self.mergeable_patcher.start()
1038 self.mergeable_mock = self.mergeable_patcher.start()
1037 self.mergeable_mock.return_value = {
1039 self.mergeable_mock.return_value = {
1038 'rhodecode_pr_merge_enabled': value}
1040 'rhodecode_pr_merge_enabled': value}
1039
1041
1040 def cleanup(self):
1042 def cleanup(self):
1041 # In case the source repository is already cleaned up, the pull
1043 # In case the source repository is already cleaned up, the pull
1042 # request will already be deleted.
1044 # request will already be deleted.
1043 pull_request = PullRequest().get(self.pull_request_id)
1045 pull_request = PullRequest().get(self.pull_request_id)
1044 if pull_request:
1046 if pull_request:
1045 PullRequestModel().delete(pull_request, pull_request.author)
1047 PullRequestModel().delete(pull_request, pull_request.author)
1046 Session().commit()
1048 Session().commit()
1047
1049
1048 if self.notification_patcher:
1050 if self.notification_patcher:
1049 self.notification_patcher.stop()
1051 self.notification_patcher.stop()
1050
1052
1051 if self.mergeable_patcher:
1053 if self.mergeable_patcher:
1052 self.mergeable_patcher.stop()
1054 self.mergeable_patcher.stop()
1053
1055
1054
1056
1055 @pytest.fixture()
1057 @pytest.fixture()
1056 def user_admin(baseapp):
1058 def user_admin(baseapp):
1057 """
1059 """
1058 Provides the default admin test user as an instance of `db.User`.
1060 Provides the default admin test user as an instance of `db.User`.
1059 """
1061 """
1060 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1062 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1061 return user
1063 return user
1062
1064
1063
1065
1064 @pytest.fixture()
1066 @pytest.fixture()
1065 def user_regular(baseapp):
1067 def user_regular(baseapp):
1066 """
1068 """
1067 Provides the default regular test user as an instance of `db.User`.
1069 Provides the default regular test user as an instance of `db.User`.
1068 """
1070 """
1069 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1071 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1070 return user
1072 return user
1071
1073
1072
1074
1073 @pytest.fixture()
1075 @pytest.fixture()
1074 def user_util(request, db_connection):
1076 def user_util(request, db_connection):
1075 """
1077 """
1076 Provides a wired instance of `UserUtility` with integrated cleanup.
1078 Provides a wired instance of `UserUtility` with integrated cleanup.
1077 """
1079 """
1078 utility = UserUtility(test_name=request.node.name)
1080 utility = UserUtility(test_name=request.node.name)
1079 request.addfinalizer(utility.cleanup)
1081 request.addfinalizer(utility.cleanup)
1080 return utility
1082 return utility
1081
1083
1082
1084
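# A hedged sketch: `user_util` wires the UserUtility class below with automatic
# cleanup, so tests can create throwaway users, repos and repo groups without
# writing any teardown code themselves.
def test_user_util_sketch(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    repo = user_util.create_repo()
    assert user.user_id and repo_group.group_id and repo.repo_id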
1083 # TODO: johbo: Split this up into utilities per domain or something similar
1085 # TODO: johbo: Split this up into utilities per domain or something similar
1084 class UserUtility(object):
1086 class UserUtility(object):
1085
1087
1086 def __init__(self, test_name="test"):
1088 def __init__(self, test_name="test"):
1087 self._test_name = self._sanitize_name(test_name)
1089 self._test_name = self._sanitize_name(test_name)
1088 self.fixture = Fixture()
1090 self.fixture = Fixture()
1089 self.repo_group_ids = []
1091 self.repo_group_ids = []
1090 self.repos_ids = []
1092 self.repos_ids = []
1091 self.user_ids = []
1093 self.user_ids = []
1092 self.user_group_ids = []
1094 self.user_group_ids = []
1093 self.user_repo_permission_ids = []
1095 self.user_repo_permission_ids = []
1094 self.user_group_repo_permission_ids = []
1096 self.user_group_repo_permission_ids = []
1095 self.user_repo_group_permission_ids = []
1097 self.user_repo_group_permission_ids = []
1096 self.user_group_repo_group_permission_ids = []
1098 self.user_group_repo_group_permission_ids = []
1097 self.user_user_group_permission_ids = []
1099 self.user_user_group_permission_ids = []
1098 self.user_group_user_group_permission_ids = []
1100 self.user_group_user_group_permission_ids = []
1099 self.user_permissions = []
1101 self.user_permissions = []
1100
1102
1101 def _sanitize_name(self, name):
1103 def _sanitize_name(self, name):
1102 for char in ['[', ']']:
1104 for char in ['[', ']']:
1103 name = name.replace(char, '_')
1105 name = name.replace(char, '_')
1104 return name
1106 return name
1105
1107
1106 def create_repo_group(
1108 def create_repo_group(
1107 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1109 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1108 group_name = "{prefix}_repogroup_{count}".format(
1110 group_name = "{prefix}_repogroup_{count}".format(
1109 prefix=self._test_name,
1111 prefix=self._test_name,
1110 count=len(self.repo_group_ids))
1112 count=len(self.repo_group_ids))
1111 repo_group = self.fixture.create_repo_group(
1113 repo_group = self.fixture.create_repo_group(
1112 group_name, cur_user=owner)
1114 group_name, cur_user=owner)
1113 if auto_cleanup:
1115 if auto_cleanup:
1114 self.repo_group_ids.append(repo_group.group_id)
1116 self.repo_group_ids.append(repo_group.group_id)
1115 return repo_group
1117 return repo_group
1116
1118
1117 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1119 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1118 auto_cleanup=True, repo_type='hg', bare=False):
1120 auto_cleanup=True, repo_type='hg', bare=False):
1119 repo_name = "{prefix}_repository_{count}".format(
1121 repo_name = "{prefix}_repository_{count}".format(
1120 prefix=self._test_name,
1122 prefix=self._test_name,
1121 count=len(self.repos_ids))
1123 count=len(self.repos_ids))
1122
1124
1123 repository = self.fixture.create_repo(
1125 repository = self.fixture.create_repo(
1124 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1126 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1125 if auto_cleanup:
1127 if auto_cleanup:
1126 self.repos_ids.append(repository.repo_id)
1128 self.repos_ids.append(repository.repo_id)
1127 return repository
1129 return repository
1128
1130
1129 def create_user(self, auto_cleanup=True, **kwargs):
1131 def create_user(self, auto_cleanup=True, **kwargs):
1130 user_name = "{prefix}_user_{count}".format(
1132 user_name = "{prefix}_user_{count}".format(
1131 prefix=self._test_name,
1133 prefix=self._test_name,
1132 count=len(self.user_ids))
1134 count=len(self.user_ids))
1133 user = self.fixture.create_user(user_name, **kwargs)
1135 user = self.fixture.create_user(user_name, **kwargs)
1134 if auto_cleanup:
1136 if auto_cleanup:
1135 self.user_ids.append(user.user_id)
1137 self.user_ids.append(user.user_id)
1136 return user
1138 return user
1137
1139
1138 def create_additional_user_email(self, user, email):
1140 def create_additional_user_email(self, user, email):
1139 uem = self.fixture.create_additional_user_email(user=user, email=email)
1141 uem = self.fixture.create_additional_user_email(user=user, email=email)
1140 return uem
1142 return uem
1141
1143
1142 def create_user_with_group(self):
1144 def create_user_with_group(self):
1143 user = self.create_user()
1145 user = self.create_user()
1144 user_group = self.create_user_group(members=[user])
1146 user_group = self.create_user_group(members=[user])
1145 return user, user_group
1147 return user, user_group
1146
1148
1147 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1149 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1148 auto_cleanup=True, **kwargs):
1150 auto_cleanup=True, **kwargs):
1149 group_name = "{prefix}_usergroup_{count}".format(
1151 group_name = "{prefix}_usergroup_{count}".format(
1150 prefix=self._test_name,
1152 prefix=self._test_name,
1151 count=len(self.user_group_ids))
1153 count=len(self.user_group_ids))
1152 user_group = self.fixture.create_user_group(
1154 user_group = self.fixture.create_user_group(
1153 group_name, cur_user=owner, **kwargs)
1155 group_name, cur_user=owner, **kwargs)
1154
1156
1155 if auto_cleanup:
1157 if auto_cleanup:
1156 self.user_group_ids.append(user_group.users_group_id)
1158 self.user_group_ids.append(user_group.users_group_id)
1157 if members:
1159 if members:
1158 for user in members:
1160 for user in members:
1159 UserGroupModel().add_user_to_group(user_group, user)
1161 UserGroupModel().add_user_to_group(user_group, user)
1160 return user_group
1162 return user_group
1161
1163
1162 def grant_user_permission(self, user_name, permission_name):
1164 def grant_user_permission(self, user_name, permission_name):
1163 self.inherit_default_user_permissions(user_name, False)
1165 self.inherit_default_user_permissions(user_name, False)
1164 self.user_permissions.append((user_name, permission_name))
1166 self.user_permissions.append((user_name, permission_name))
1165
1167
1166 def grant_user_permission_to_repo_group(
1168 def grant_user_permission_to_repo_group(
1167 self, repo_group, user, permission_name):
1169 self, repo_group, user, permission_name):
1168 permission = RepoGroupModel().grant_user_permission(
1170 permission = RepoGroupModel().grant_user_permission(
1169 repo_group, user, permission_name)
1171 repo_group, user, permission_name)
1170 self.user_repo_group_permission_ids.append(
1172 self.user_repo_group_permission_ids.append(
1171 (repo_group.group_id, user.user_id))
1173 (repo_group.group_id, user.user_id))
1172 return permission
1174 return permission
1173
1175
1174 def grant_user_group_permission_to_repo_group(
1176 def grant_user_group_permission_to_repo_group(
1175 self, repo_group, user_group, permission_name):
1177 self, repo_group, user_group, permission_name):
1176 permission = RepoGroupModel().grant_user_group_permission(
1178 permission = RepoGroupModel().grant_user_group_permission(
1177 repo_group, user_group, permission_name)
1179 repo_group, user_group, permission_name)
1178 self.user_group_repo_group_permission_ids.append(
1180 self.user_group_repo_group_permission_ids.append(
1179 (repo_group.group_id, user_group.users_group_id))
1181 (repo_group.group_id, user_group.users_group_id))
1180 return permission
1182 return permission
1181
1183
1182 def grant_user_permission_to_repo(
1184 def grant_user_permission_to_repo(
1183 self, repo, user, permission_name):
1185 self, repo, user, permission_name):
1184 permission = RepoModel().grant_user_permission(
1186 permission = RepoModel().grant_user_permission(
1185 repo, user, permission_name)
1187 repo, user, permission_name)
1186 self.user_repo_permission_ids.append(
1188 self.user_repo_permission_ids.append(
1187 (repo.repo_id, user.user_id))
1189 (repo.repo_id, user.user_id))
1188 return permission
1190 return permission
1189
1191
1190 def grant_user_group_permission_to_repo(
1192 def grant_user_group_permission_to_repo(
1191 self, repo, user_group, permission_name):
1193 self, repo, user_group, permission_name):
1192 permission = RepoModel().grant_user_group_permission(
1194 permission = RepoModel().grant_user_group_permission(
1193 repo, user_group, permission_name)
1195 repo, user_group, permission_name)
1194 self.user_group_repo_permission_ids.append(
1196 self.user_group_repo_permission_ids.append(
1195 (repo.repo_id, user_group.users_group_id))
1197 (repo.repo_id, user_group.users_group_id))
1196 return permission
1198 return permission
1197
1199
1198 def grant_user_permission_to_user_group(
1200 def grant_user_permission_to_user_group(
1199 self, target_user_group, user, permission_name):
1201 self, target_user_group, user, permission_name):
1200 permission = UserGroupModel().grant_user_permission(
1202 permission = UserGroupModel().grant_user_permission(
1201 target_user_group, user, permission_name)
1203 target_user_group, user, permission_name)
1202 self.user_user_group_permission_ids.append(
1204 self.user_user_group_permission_ids.append(
1203 (target_user_group.users_group_id, user.user_id))
1205 (target_user_group.users_group_id, user.user_id))
1204 return permission
1206 return permission
1205
1207
1206 def grant_user_group_permission_to_user_group(
1208 def grant_user_group_permission_to_user_group(
1207 self, target_user_group, user_group, permission_name):
1209 self, target_user_group, user_group, permission_name):
1208 permission = UserGroupModel().grant_user_group_permission(
1210 permission = UserGroupModel().grant_user_group_permission(
1209 target_user_group, user_group, permission_name)
1211 target_user_group, user_group, permission_name)
1210 self.user_group_user_group_permission_ids.append(
1212 self.user_group_user_group_permission_ids.append(
1211 (target_user_group.users_group_id, user_group.users_group_id))
1213 (target_user_group.users_group_id, user_group.users_group_id))
1212 return permission
1214 return permission
1213
1215
1214 def revoke_user_permission(self, user_name, permission_name):
1216 def revoke_user_permission(self, user_name, permission_name):
1215 self.inherit_default_user_permissions(user_name, True)
1217 self.inherit_default_user_permissions(user_name, True)
1216 UserModel().revoke_perm(user_name, permission_name)
1218 UserModel().revoke_perm(user_name, permission_name)
1217
1219
1218 def inherit_default_user_permissions(self, user_name, value):
1220 def inherit_default_user_permissions(self, user_name, value):
1219 user = UserModel().get_by_username(user_name)
1221 user = UserModel().get_by_username(user_name)
1220 user.inherit_default_permissions = value
1222 user.inherit_default_permissions = value
1221 Session().add(user)
1223 Session().add(user)
1222 Session().commit()
1224 Session().commit()
1223
1225
1224 def cleanup(self):
1226 def cleanup(self):
1225 self._cleanup_permissions()
1227 self._cleanup_permissions()
1226 self._cleanup_repos()
1228 self._cleanup_repos()
1227 self._cleanup_repo_groups()
1229 self._cleanup_repo_groups()
1228 self._cleanup_user_groups()
1230 self._cleanup_user_groups()
1229 self._cleanup_users()
1231 self._cleanup_users()
1230
1232
1231 def _cleanup_permissions(self):
1233 def _cleanup_permissions(self):
1232 if self.user_permissions:
1234 if self.user_permissions:
1233 for user_name, permission_name in self.user_permissions:
1235 for user_name, permission_name in self.user_permissions:
1234 self.revoke_user_permission(user_name, permission_name)
1236 self.revoke_user_permission(user_name, permission_name)
1235
1237
1236 for permission in self.user_repo_permission_ids:
1238 for permission in self.user_repo_permission_ids:
1237 RepoModel().revoke_user_permission(*permission)
1239 RepoModel().revoke_user_permission(*permission)
1238
1240
1239 for permission in self.user_group_repo_permission_ids:
1241 for permission in self.user_group_repo_permission_ids:
1240 RepoModel().revoke_user_group_permission(*permission)
1242 RepoModel().revoke_user_group_permission(*permission)
1241
1243
1242 for permission in self.user_repo_group_permission_ids:
1244 for permission in self.user_repo_group_permission_ids:
1243 RepoGroupModel().revoke_user_permission(*permission)
1245 RepoGroupModel().revoke_user_permission(*permission)
1244
1246
1245 for permission in self.user_group_repo_group_permission_ids:
1247 for permission in self.user_group_repo_group_permission_ids:
1246 RepoGroupModel().revoke_user_group_permission(*permission)
1248 RepoGroupModel().revoke_user_group_permission(*permission)
1247
1249
1248 for permission in self.user_user_group_permission_ids:
1250 for permission in self.user_user_group_permission_ids:
1249 UserGroupModel().revoke_user_permission(*permission)
1251 UserGroupModel().revoke_user_permission(*permission)
1250
1252
1251 for permission in self.user_group_user_group_permission_ids:
1253 for permission in self.user_group_user_group_permission_ids:
1252 UserGroupModel().revoke_user_group_permission(*permission)
1254 UserGroupModel().revoke_user_group_permission(*permission)
1253
1255
1254 def _cleanup_repo_groups(self):
1256 def _cleanup_repo_groups(self):
1255 def _repo_group_compare(first_group_id, second_group_id):
1257 def _repo_group_compare(first_group_id, second_group_id):
1256 """
1258 """
1257 Gives higher priority to the groups with the most complex paths
1259 Gives higher priority to the groups with the most complex paths
1258 """
1260 """
1259 first_group = RepoGroup.get(first_group_id)
1261 first_group = RepoGroup.get(first_group_id)
1260 second_group = RepoGroup.get(second_group_id)
1262 second_group = RepoGroup.get(second_group_id)
1261 first_group_parts = (
1263 first_group_parts = (
1262 len(first_group.group_name.split('/')) if first_group else 0)
1264 len(first_group.group_name.split('/')) if first_group else 0)
1263 second_group_parts = (
1265 second_group_parts = (
1264 len(second_group.group_name.split('/')) if second_group else 0)
1266 len(second_group.group_name.split('/')) if second_group else 0)
1265 return cmp(second_group_parts, first_group_parts)
1267 return cmp(second_group_parts, first_group_parts)
1266
1268
1267 sorted_repo_group_ids = sorted(
1269 sorted_repo_group_ids = sorted(
1268 self.repo_group_ids, cmp=_repo_group_compare)
1270 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1269 for repo_group_id in sorted_repo_group_ids:
1271 for repo_group_id in sorted_repo_group_ids:
1270 self.fixture.destroy_repo_group(repo_group_id)
1272 self.fixture.destroy_repo_group(repo_group_id)
1271
1273
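The hunk above (and the matching one in `_cleanup_user_groups` below) replaces the Python 2-only `cmp=` argument of `sorted()` with `key=functools.cmp_to_key(...)`. Note that the builtin `cmp()` used inside the comparator no longer exists on Python 3, so a comparator has to produce the -1/0/1 result itself (the code above presumably relies on a `cmp` helper defined elsewhere in this conftest). A minimal, self-contained sketch of the same pattern, with illustrative names that are not part of the diff:

import functools

def _depth_compare(first_path, second_path):
    # Deeper paths sort first; return the -1/0/1 contract explicitly,
    # since Python 3 has no builtin cmp().
    first_parts = len(first_path.split('/'))
    second_parts = len(second_path.split('/'))
    return (second_parts > first_parts) - (second_parts < first_parts)

paths = ['parent', 'parent/child/grandchild', 'parent/child']
print(sorted(paths, key=functools.cmp_to_key(_depth_compare)))
# ['parent/child/grandchild', 'parent/child', 'parent']

Destroying the deepest groups first is the point of the comparator: child repo groups must be removed before their parents.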
1272 def _cleanup_repos(self):
1274 def _cleanup_repos(self):
1273 sorted_repos_ids = sorted(self.repos_ids)
1275 sorted_repos_ids = sorted(self.repos_ids)
1274 for repo_id in sorted_repos_ids:
1276 for repo_id in sorted_repos_ids:
1275 self.fixture.destroy_repo(repo_id)
1277 self.fixture.destroy_repo(repo_id)
1276
1278
1277 def _cleanup_user_groups(self):
1279 def _cleanup_user_groups(self):
1278 def _user_group_compare(first_group_id, second_group_id):
1280 def _user_group_compare(first_group_id, second_group_id):
1279 """
1281 """
1280 Gives higher priority to the groups with the most complex paths
1282 Gives higher priority to the groups with the most complex paths
1281 """
1283 """
1282 first_group = UserGroup.get(first_group_id)
1284 first_group = UserGroup.get(first_group_id)
1283 second_group = UserGroup.get(second_group_id)
1285 second_group = UserGroup.get(second_group_id)
1284 first_group_parts = (
1286 first_group_parts = (
1285 len(first_group.users_group_name.split('/'))
1287 len(first_group.users_group_name.split('/'))
1286 if first_group else 0)
1288 if first_group else 0)
1287 second_group_parts = (
1289 second_group_parts = (
1288 len(second_group.users_group_name.split('/'))
1290 len(second_group.users_group_name.split('/'))
1289 if second_group else 0)
1291 if second_group else 0)
1290 return cmp(second_group_parts, first_group_parts)
1292 return cmp(second_group_parts, first_group_parts)
1291
1293
1292 sorted_user_group_ids = sorted(
1294 sorted_user_group_ids = sorted(
1293 self.user_group_ids, cmp=_user_group_compare)
1295 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1294 for user_group_id in sorted_user_group_ids:
1296 for user_group_id in sorted_user_group_ids:
1295 self.fixture.destroy_user_group(user_group_id)
1297 self.fixture.destroy_user_group(user_group_id)
1296
1298
1297 def _cleanup_users(self):
1299 def _cleanup_users(self):
1298 for user_id in self.user_ids:
1300 for user_id in self.user_ids:
1299 self.fixture.destroy_user(user_id)
1301 self.fixture.destroy_user(user_id)
1300
1302
1301
1303
1302 @pytest.fixture(scope='session')
1304 @pytest.fixture(scope='session')
1303 def testrun():
1305 def testrun():
1304 return {
1306 return {
1305 'uuid': uuid.uuid4(),
1307 'uuid': uuid.uuid4(),
1306 'start': datetime.datetime.utcnow().isoformat(),
1308 'start': datetime.datetime.utcnow().isoformat(),
1307 'timestamp': int(time.time()),
1309 'timestamp': int(time.time()),
1308 }
1310 }
1309
1311
1310
1312
1311 class AppenlightClient(object):
1313 class AppenlightClient(object):
1312
1314
1313 url_template = '{url}?protocol_version=0.5'
1315 url_template = '{url}?protocol_version=0.5'
1314
1316
1315 def __init__(
1317 def __init__(
1316 self, url, api_key, add_server=True, add_timestamp=True,
1318 self, url, api_key, add_server=True, add_timestamp=True,
1317 namespace=None, request=None, testrun=None):
1319 namespace=None, request=None, testrun=None):
1318 self.url = self.url_template.format(url=url)
1320 self.url = self.url_template.format(url=url)
1319 self.api_key = api_key
1321 self.api_key = api_key
1320 self.add_server = add_server
1322 self.add_server = add_server
1321 self.add_timestamp = add_timestamp
1323 self.add_timestamp = add_timestamp
1322 self.namespace = namespace
1324 self.namespace = namespace
1323 self.request = request
1325 self.request = request
1324 self.server = socket.getfqdn(socket.gethostname())
1326 self.server = socket.getfqdn(socket.gethostname())
1325 self.tags_before = {}
1327 self.tags_before = {}
1326 self.tags_after = {}
1328 self.tags_after = {}
1327 self.stats = []
1329 self.stats = []
1328 self.testrun = testrun or {}
1330 self.testrun = testrun or {}
1329
1331
1330 def tag_before(self, tag, value):
1332 def tag_before(self, tag, value):
1331 self.tags_before[tag] = value
1333 self.tags_before[tag] = value
1332
1334
1333 def tag_after(self, tag, value):
1335 def tag_after(self, tag, value):
1334 self.tags_after[tag] = value
1336 self.tags_after[tag] = value
1335
1337
1336 def collect(self, data):
1338 def collect(self, data):
1337 if self.add_server:
1339 if self.add_server:
1338 data.setdefault('server', self.server)
1340 data.setdefault('server', self.server)
1339 if self.add_timestamp:
1341 if self.add_timestamp:
1340 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1342 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1341 if self.namespace:
1343 if self.namespace:
1342 data.setdefault('namespace', self.namespace)
1344 data.setdefault('namespace', self.namespace)
1343 if self.request:
1345 if self.request:
1344 data.setdefault('request', self.request)
1346 data.setdefault('request', self.request)
1345 self.stats.append(data)
1347 self.stats.append(data)
1346
1348
1347 def send_stats(self):
1349 def send_stats(self):
1348 tags = [
1350 tags = [
1349 ('testrun', self.request),
1351 ('testrun', self.request),
1350 ('testrun.start', self.testrun['start']),
1352 ('testrun.start', self.testrun['start']),
1351 ('testrun.timestamp', self.testrun['timestamp']),
1353 ('testrun.timestamp', self.testrun['timestamp']),
1352 ('test', self.namespace),
1354 ('test', self.namespace),
1353 ]
1355 ]
1354 for key, value in self.tags_before.items():
1356 for key, value in self.tags_before.items():
1355 tags.append((key + '.before', value))
1357 tags.append((key + '.before', value))
1356 try:
1358 try:
1357 delta = self.tags_after[key] - value
1359 delta = self.tags_after[key] - value
1358 tags.append((key + '.delta', delta))
1360 tags.append((key + '.delta', delta))
1359 except Exception:
1361 except Exception:
1360 pass
1362 pass
1361 for key, value in self.tags_after.items():
1363 for key, value in self.tags_after.items():
1362 tags.append((key + '.after', value))
1364 tags.append((key + '.after', value))
1363 self.collect({
1365 self.collect({
1364 'message': "Collected tags",
1366 'message': "Collected tags",
1365 'tags': tags,
1367 'tags': tags,
1366 })
1368 })
1367
1369
1368 response = requests.post(
1370 response = requests.post(
1369 self.url,
1371 self.url,
1370 headers={
1372 headers={
1371 'X-appenlight-api-key': self.api_key},
1373 'X-appenlight-api-key': self.api_key},
1372 json=self.stats,
1374 json=self.stats,
1373 )
1375 )
1374
1376
1375 if not response.status_code == 200:
1377 if not response.status_code == 200:
1376 pprint.pprint(self.stats)
1378 pprint.pprint(self.stats)
1377 print(response.headers)
1379 print(response.headers)
1378 print(response.text)
1380 print(response.text)
1379 raise Exception('Sending to appenlight failed')
1381 raise Exception('Sending to appenlight failed')
1380
1382
1381
1383
1382 @pytest.fixture()
1384 @pytest.fixture()
1383 def gist_util(request, db_connection):
1385 def gist_util(request, db_connection):
1384 """
1386 """
1385 Provides a wired instance of `GistUtility` with integrated cleanup.
1387 Provides a wired instance of `GistUtility` with integrated cleanup.
1386 """
1388 """
1387 utility = GistUtility()
1389 utility = GistUtility()
1388 request.addfinalizer(utility.cleanup)
1390 request.addfinalizer(utility.cleanup)
1389 return utility
1391 return utility
1390
1392
1391
1393
1392 class GistUtility(object):
1394 class GistUtility(object):
1393 def __init__(self):
1395 def __init__(self):
1394 self.fixture = Fixture()
1396 self.fixture = Fixture()
1395 self.gist_ids = []
1397 self.gist_ids = []
1396
1398
1397 def create_gist(self, **kwargs):
1399 def create_gist(self, **kwargs):
1398 gist = self.fixture.create_gist(**kwargs)
1400 gist = self.fixture.create_gist(**kwargs)
1399 self.gist_ids.append(gist.gist_id)
1401 self.gist_ids.append(gist.gist_id)
1400 return gist
1402 return gist
1401
1403
1402 def cleanup(self):
1404 def cleanup(self):
1403 for id_ in self.gist_ids:
1405 for id_ in self.gist_ids:
1404 self.fixture.destroy_gists(str(id_))
1406 self.fixture.destroy_gists(str(id_))
1405
1407
1406
1408
1407 @pytest.fixture()
1409 @pytest.fixture()
1408 def enabled_backends(request):
1410 def enabled_backends(request):
1409 backends = request.config.option.backends
1411 backends = request.config.option.backends
1410 return backends[:]
1412 return backends[:]
1411
1413
1412
1414
1413 @pytest.fixture()
1415 @pytest.fixture()
1414 def settings_util(request, db_connection):
1416 def settings_util(request, db_connection):
1415 """
1417 """
1416 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1418 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1417 """
1419 """
1418 utility = SettingsUtility()
1420 utility = SettingsUtility()
1419 request.addfinalizer(utility.cleanup)
1421 request.addfinalizer(utility.cleanup)
1420 return utility
1422 return utility
1421
1423
1422
1424
1423 class SettingsUtility(object):
1425 class SettingsUtility(object):
1424 def __init__(self):
1426 def __init__(self):
1425 self.rhodecode_ui_ids = []
1427 self.rhodecode_ui_ids = []
1426 self.rhodecode_setting_ids = []
1428 self.rhodecode_setting_ids = []
1427 self.repo_rhodecode_ui_ids = []
1429 self.repo_rhodecode_ui_ids = []
1428 self.repo_rhodecode_setting_ids = []
1430 self.repo_rhodecode_setting_ids = []
1429
1431
1430 def create_repo_rhodecode_ui(
1432 def create_repo_rhodecode_ui(
1431 self, repo, section, value, key=None, active=True, cleanup=True):
1433 self, repo, section, value, key=None, active=True, cleanup=True):
1432 key = key or hashlib.sha1(
1433 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1434 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1434
1435
1435 setting = RepoRhodeCodeUi()
1436 setting = RepoRhodeCodeUi()
1436 setting.repository_id = repo.repo_id
1437 setting.repository_id = repo.repo_id
1437 setting.ui_section = section
1438 setting.ui_section = section
1438 setting.ui_value = value
1439 setting.ui_value = value
1439 setting.ui_key = key
1440 setting.ui_key = key
1440 setting.ui_active = active
1441 setting.ui_active = active
1441 Session().add(setting)
1442 Session().add(setting)
1442 Session().commit()
1443 Session().commit()
1443
1444
1444 if cleanup:
1445 if cleanup:
1445 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1446 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1446 return setting
1447 return setting
1447
1448
1448 def create_rhodecode_ui(
1449 def create_rhodecode_ui(
1449 self, section, value, key=None, active=True, cleanup=True):
1450 self, section, value, key=None, active=True, cleanup=True):
1450 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1451 key = key or sha1_safe(f'{section}{value}')
1451
1452
1452 setting = RhodeCodeUi()
1453 setting = RhodeCodeUi()
1453 setting.ui_section = section
1454 setting.ui_section = section
1454 setting.ui_value = value
1455 setting.ui_value = value
1455 setting.ui_key = key
1456 setting.ui_key = key
1456 setting.ui_active = active
1457 setting.ui_active = active
1457 Session().add(setting)
1458 Session().add(setting)
1458 Session().commit()
1459 Session().commit()
1459
1460
1460 if cleanup:
1461 if cleanup:
1461 self.rhodecode_ui_ids.append(setting.ui_id)
1462 self.rhodecode_ui_ids.append(setting.ui_id)
1462 return setting
1463 return setting
1463
1464
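The `create_repo_rhodecode_ui` and `create_rhodecode_ui` hunks above also swap `hashlib.sha1('...').hexdigest()` for `sha1_safe(...)`: on Python 3, `hashlib.sha1()` accepts only bytes, so hashing a formatted str raises `TypeError`. A rough sketch of what such a helper has to do (an illustration, not the actual RhodeCode `sha1_safe`):

import hashlib

def sha1_safe_sketch(value) -> str:
    # Encode str input so Python 3's hashlib always receives bytes.
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.sha1(value).hexdigest()

section, value, repo_id = 'hooks', 'python', 1
print(sha1_safe_sketch(f'{section}{value}{repo_id}'))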
1464 def create_repo_rhodecode_setting(
1465 def create_repo_rhodecode_setting(
1465 self, repo, name, value, type_, cleanup=True):
1466 self, repo, name, value, type_, cleanup=True):
1466 setting = RepoRhodeCodeSetting(
1467 setting = RepoRhodeCodeSetting(
1467 repo.repo_id, key=name, val=value, type=type_)
1468 repo.repo_id, key=name, val=value, type=type_)
1468 Session().add(setting)
1469 Session().add(setting)
1469 Session().commit()
1470 Session().commit()
1470
1471
1471 if cleanup:
1472 if cleanup:
1472 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1473 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1473 return setting
1474 return setting
1474
1475
1475 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1476 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1476 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1477 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1477 Session().add(setting)
1478 Session().add(setting)
1478 Session().commit()
1479 Session().commit()
1479
1480
1480 if cleanup:
1481 if cleanup:
1481 self.rhodecode_setting_ids.append(setting.app_settings_id)
1482 self.rhodecode_setting_ids.append(setting.app_settings_id)
1482
1483
1483 return setting
1484 return setting
1484
1485
1485 def cleanup(self):
1486 def cleanup(self):
1486 for id_ in self.rhodecode_ui_ids:
1487 for id_ in self.rhodecode_ui_ids:
1487 setting = RhodeCodeUi.get(id_)
1488 setting = RhodeCodeUi.get(id_)
1488 Session().delete(setting)
1489 Session().delete(setting)
1489
1490
1490 for id_ in self.rhodecode_setting_ids:
1491 for id_ in self.rhodecode_setting_ids:
1491 setting = RhodeCodeSetting.get(id_)
1492 setting = RhodeCodeSetting.get(id_)
1492 Session().delete(setting)
1493 Session().delete(setting)
1493
1494
1494 for id_ in self.repo_rhodecode_ui_ids:
1495 for id_ in self.repo_rhodecode_ui_ids:
1495 setting = RepoRhodeCodeUi.get(id_)
1496 setting = RepoRhodeCodeUi.get(id_)
1496 Session().delete(setting)
1497 Session().delete(setting)
1497
1498
1498 for id_ in self.repo_rhodecode_setting_ids:
1499 for id_ in self.repo_rhodecode_setting_ids:
1499 setting = RepoRhodeCodeSetting.get(id_)
1500 setting = RepoRhodeCodeSetting.get(id_)
1500 Session().delete(setting)
1501 Session().delete(setting)
1501
1502
1502 Session().commit()
1503 Session().commit()
1503
1504
1504
1505
1505 @pytest.fixture()
1506 @pytest.fixture()
1506 def no_notifications(request):
1507 def no_notifications(request):
1507 notification_patcher = mock.patch(
1508 notification_patcher = mock.patch(
1508 'rhodecode.model.notification.NotificationModel.create')
1509 'rhodecode.model.notification.NotificationModel.create')
1509 notification_patcher.start()
1510 notification_patcher.start()
1510 request.addfinalizer(notification_patcher.stop)
1511 request.addfinalizer(notification_patcher.stop)
1511
1512
1512
1513
1513 @pytest.fixture(scope='session')
1514 @pytest.fixture(scope='session')
1514 def repeat(request):
1515 def repeat(request):
1515 """
1516 """
1516 The number of repetitions is based on this fixture.
1517 The number of repetitions is based on this fixture.
1517
1518
1518 Slower calls may divide it by 10 or 100. It is chosen so that the
1519 Slower calls may divide it by 10 or 100. It is chosen so that the
1519 tests are not too slow in our default test suite.
1520 tests are not too slow in our default test suite.
1520 """
1521 """
1521 return request.config.getoption('--repeat')
1522 return request.config.getoption('--repeat')
1522
1523
1523
1524
1524 @pytest.fixture()
1525 @pytest.fixture()
1525 def rhodecode_fixtures():
1526 def rhodecode_fixtures():
1526 return Fixture()
1527 return Fixture()
1527
1528
1528
1529
1529 @pytest.fixture()
1530 @pytest.fixture()
1530 def context_stub():
1531 def context_stub():
1531 """
1532 """
1532 Stub context object.
1533 Stub context object.
1533 """
1534 """
1534 context = pyramid.testing.DummyResource()
1535 context = pyramid.testing.DummyResource()
1535 return context
1536 return context
1536
1537
1537
1538
1538 @pytest.fixture()
1539 @pytest.fixture()
1539 def request_stub():
1540 def request_stub():
1540 """
1541 """
1541 Stub request object.
1542 Stub request object.
1542 """
1543 """
1543 from rhodecode.lib.base import bootstrap_request
1544 from rhodecode.lib.base import bootstrap_request
1544 request = bootstrap_request(scheme='https')
1545 request = bootstrap_request(scheme='https')
1545 return request
1546 return request
1546
1547
1547
1548
1548 @pytest.fixture()
1549 @pytest.fixture()
1549 def config_stub(request, request_stub):
1550 def config_stub(request, request_stub):
1550 """
1551 """
1551 Set up pyramid.testing and return the Configurator.
1552 Set up pyramid.testing and return the Configurator.
1552 """
1553 """
1553 from rhodecode.lib.base import bootstrap_config
1554 from rhodecode.lib.base import bootstrap_config
1554 config = bootstrap_config(request=request_stub)
1555 config = bootstrap_config(request=request_stub)
1555
1556
1556 @request.addfinalizer
1557 @request.addfinalizer
1557 def cleanup():
1558 def cleanup():
1558 pyramid.testing.tearDown()
1559 pyramid.testing.tearDown()
1559
1560
1560 return config
1561 return config
1561
1562
1562
1563
1563 @pytest.fixture()
1564 @pytest.fixture()
1564 def StubIntegrationType():
1565 def StubIntegrationType():
1565 class _StubIntegrationType(IntegrationTypeBase):
1566 class _StubIntegrationType(IntegrationTypeBase):
1566 """ Test integration type class """
1567 """ Test integration type class """
1567
1568
1568 key = 'test'
1569 key = 'test'
1569 display_name = 'Test integration type'
1570 display_name = 'Test integration type'
1570 description = 'A test integration type for testing'
1571 description = 'A test integration type for testing'
1571
1572
1572 @classmethod
1573 @classmethod
1573 def icon(cls):
1574 def icon(cls):
1574 return 'test_icon_html_image'
1575 return 'test_icon_html_image'
1575
1576
1576 def __init__(self, settings):
1577 def __init__(self, settings):
1577 super(_StubIntegrationType, self).__init__(settings)
1578 super(_StubIntegrationType, self).__init__(settings)
1578 self.sent_events = [] # for testing
1579 self.sent_events = [] # for testing
1579
1580
1580 def send_event(self, event):
1581 def send_event(self, event):
1581 self.sent_events.append(event)
1582 self.sent_events.append(event)
1582
1583
1583 def settings_schema(self):
1584 def settings_schema(self):
1584 class SettingsSchema(colander.Schema):
1585 class SettingsSchema(colander.Schema):
1585 test_string_field = colander.SchemaNode(
1586 test_string_field = colander.SchemaNode(
1586 colander.String(),
1587 colander.String(),
1587 missing=colander.required,
1588 missing=colander.required,
1588 title='test string field',
1589 title='test string field',
1589 )
1590 )
1590 test_int_field = colander.SchemaNode(
1591 test_int_field = colander.SchemaNode(
1591 colander.Int(),
1592 colander.Int(),
1592 title='some integer setting',
1593 title='some integer setting',
1593 )
1594 )
1594 return SettingsSchema()
1595 return SettingsSchema()
1595
1596
1596
1597
1597 integration_type_registry.register_integration_type(_StubIntegrationType)
1598 integration_type_registry.register_integration_type(_StubIntegrationType)
1598 return _StubIntegrationType
1599 return _StubIntegrationType
1599
1600
1600
1601
1601 @pytest.fixture()
1602 @pytest.fixture()
1602 def stub_integration_settings():
1603 def stub_integration_settings():
1603 return {
1604 return {
1604 'test_string_field': 'some data',
1605 'test_string_field': 'some data',
1605 'test_int_field': 100,
1606 'test_int_field': 100,
1606 }
1607 }
1607
1608
1608
1609
1609 @pytest.fixture()
1610 @pytest.fixture()
1610 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1611 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1611 stub_integration_settings):
1612 stub_integration_settings):
1612 integration = IntegrationModel().create(
1613 integration = IntegrationModel().create(
1613 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1614 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1614 name='test repo integration',
1615 name='test repo integration',
1615 repo=repo_stub, repo_group=None, child_repos_only=None)
1616 repo=repo_stub, repo_group=None, child_repos_only=None)
1616
1617
1617 @request.addfinalizer
1618 @request.addfinalizer
1618 def cleanup():
1619 def cleanup():
1619 IntegrationModel().delete(integration)
1620 IntegrationModel().delete(integration)
1620
1621
1621 return integration
1622 return integration
1622
1623
1623
1624
1624 @pytest.fixture()
1625 @pytest.fixture()
1625 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1626 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1626 stub_integration_settings):
1627 stub_integration_settings):
1627 integration = IntegrationModel().create(
1628 integration = IntegrationModel().create(
1628 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1629 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1629 name='test repogroup integration',
1630 name='test repogroup integration',
1630 repo=None, repo_group=test_repo_group, child_repos_only=True)
1631 repo=None, repo_group=test_repo_group, child_repos_only=True)
1631
1632
1632 @request.addfinalizer
1633 @request.addfinalizer
1633 def cleanup():
1634 def cleanup():
1634 IntegrationModel().delete(integration)
1635 IntegrationModel().delete(integration)
1635
1636
1636 return integration
1637 return integration
1637
1638
1638
1639
1639 @pytest.fixture()
1640 @pytest.fixture()
1640 def repogroup_recursive_integration_stub(request, test_repo_group,
1641 def repogroup_recursive_integration_stub(request, test_repo_group,
1641 StubIntegrationType, stub_integration_settings):
1642 StubIntegrationType, stub_integration_settings):
1642 integration = IntegrationModel().create(
1643 integration = IntegrationModel().create(
1643 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1644 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1644 name='test recursive repogroup integration',
1645 name='test recursive repogroup integration',
1645 repo=None, repo_group=test_repo_group, child_repos_only=False)
1646 repo=None, repo_group=test_repo_group, child_repos_only=False)
1646
1647
1647 @request.addfinalizer
1648 @request.addfinalizer
1648 def cleanup():
1649 def cleanup():
1649 IntegrationModel().delete(integration)
1650 IntegrationModel().delete(integration)
1650
1651
1651 return integration
1652 return integration
1652
1653
1653
1654
1654 @pytest.fixture()
1655 @pytest.fixture()
1655 def global_integration_stub(request, StubIntegrationType,
1656 def global_integration_stub(request, StubIntegrationType,
1656 stub_integration_settings):
1657 stub_integration_settings):
1657 integration = IntegrationModel().create(
1658 integration = IntegrationModel().create(
1658 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1659 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1659 name='test global integration',
1660 name='test global integration',
1660 repo=None, repo_group=None, child_repos_only=None)
1661 repo=None, repo_group=None, child_repos_only=None)
1661
1662
1662 @request.addfinalizer
1663 @request.addfinalizer
1663 def cleanup():
1664 def cleanup():
1664 IntegrationModel().delete(integration)
1665 IntegrationModel().delete(integration)
1665
1666
1666 return integration
1667 return integration
1667
1668
1668
1669
1669 @pytest.fixture()
1670 @pytest.fixture()
1670 def root_repos_integration_stub(request, StubIntegrationType,
1671 def root_repos_integration_stub(request, StubIntegrationType,
1671 stub_integration_settings):
1672 stub_integration_settings):
1672 integration = IntegrationModel().create(
1673 integration = IntegrationModel().create(
1673 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1674 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1674 name='test global integration',
1675 name='test global integration',
1675 repo=None, repo_group=None, child_repos_only=True)
1676 repo=None, repo_group=None, child_repos_only=True)
1676
1677
1677 @request.addfinalizer
1678 @request.addfinalizer
1678 def cleanup():
1679 def cleanup():
1679 IntegrationModel().delete(integration)
1680 IntegrationModel().delete(integration)
1680
1681
1681 return integration
1682 return integration
1682
1683
1683
1684
1684 @pytest.fixture()
1685 @pytest.fixture()
1685 def local_dt_to_utc():
1686 def local_dt_to_utc():
1686 def _factory(dt):
1687 def _factory(dt):
1687 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1688 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1688 dateutil.tz.tzutc()).replace(tzinfo=None)
1689 dateutil.tz.tzutc()).replace(tzinfo=None)
1689 return _factory
1690 return _factory
1690
1691
1691
1692
1692 @pytest.fixture()
1693 @pytest.fixture()
1693 def disable_anonymous_user(request, baseapp):
1694 def disable_anonymous_user(request, baseapp):
1694 set_anonymous_access(False)
1695 set_anonymous_access(False)
1695
1696
1696 @request.addfinalizer
1697 @request.addfinalizer
1697 def cleanup():
1698 def cleanup():
1698 set_anonymous_access(True)
1699 set_anonymous_access(True)
1699
1700
1700
1701
1701 @pytest.fixture(scope='module')
1702 @pytest.fixture(scope='module')
1702 def rc_fixture(request):
1703 def rc_fixture(request):
1703 return Fixture()
1704 return Fixture()
1704
1705
1705
1706
1706 @pytest.fixture()
1707 @pytest.fixture()
1707 def repo_groups(request):
1708 def repo_groups(request):
1708 fixture = Fixture()
1709 fixture = Fixture()
1709
1710
1710 session = Session()
1711 session = Session()
1711 zombie_group = fixture.create_repo_group('zombie')
1712 zombie_group = fixture.create_repo_group('zombie')
1712 parent_group = fixture.create_repo_group('parent')
1713 parent_group = fixture.create_repo_group('parent')
1713 child_group = fixture.create_repo_group('parent/child')
1714 child_group = fixture.create_repo_group('parent/child')
1714 groups_in_db = session.query(RepoGroup).all()
1715 groups_in_db = session.query(RepoGroup).all()
1715 assert len(groups_in_db) == 3
1716 assert len(groups_in_db) == 3
1716 assert child_group.group_parent_id == parent_group.group_id
1717 assert child_group.group_parent_id == parent_group.group_id
1717
1718
1718 @request.addfinalizer
1719 @request.addfinalizer
1719 def cleanup():
1720 def cleanup():
1720 fixture.destroy_repo_group(zombie_group)
1721 fixture.destroy_repo_group(zombie_group)
1721 fixture.destroy_repo_group(child_group)
1722 fixture.destroy_repo_group(child_group)
1722 fixture.destroy_repo_group(parent_group)
1723 fixture.destroy_repo_group(parent_group)
1723
1724
1724 return zombie_group, parent_group, child_group
1725 return zombie_group, parent_group, child_group
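Most fixtures in this conftest follow the same shape: build an object, register teardown with `request.addfinalizer`, return the object. A minimal illustration of that pattern (the fixture and resource names here are made up for the example):

import pytest

@pytest.fixture()
def temp_resource(request):
    # Create the resource, register its cleanup, then hand it to the test.
    resource = {'name': 'stub'}

    @request.addfinalizer
    def cleanup():
        resource.clear()

    return resource

A yield-style fixture (`yield resource` followed by the cleanup code) would achieve the same effect; the finalizer form is simply the convention this file already uses.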
@@ -1,51 +1,51 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24
24
25 @pytest.mark.usefixtures('autologin_user', 'app')
25 @pytest.mark.usefixtures('autologin_user', 'app')
26 def test_vcs_available_returns_summary_page(app, backend):
26 def test_vcs_available_returns_summary_page(app, backend):
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
27 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
28 response = app.get(url)
28 response = app.get(url)
29 assert response.status_code == 200
29 assert response.status_code == 200
30 assert 'Summary' in response.body
30 assert 'Summary' in response.text
31
31
32
32
33 @pytest.mark.usefixtures('autologin_user', 'app')
33 @pytest.mark.usefixtures('autologin_user', 'app')
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend):
34 def test_vcs_unavailable_returns_vcs_error_page(app, backend):
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
35 from rhodecode.lib.vcs.exceptions import VCSCommunicationError
36
36
37 # Depending on the VCSServer protocol in use, we have to patch a different
37 # Depending on the VCSServer protocol in use, we have to patch a different
38 # RemoteRepo class to raise an exception. For the test it doesn't matter
38 # RemoteRepo class to raise an exception. For the test it doesn't matter
39 # whether http is used; it just requires the exception to be raised.
39 # whether http is used; it just requires the exception to be raised.
40 from rhodecode.lib.vcs.client_http import RemoteRepo
40 from rhodecode.lib.vcs.client_http import RemoteRepo
41
41
42 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
42 url = '/{repo_name}'.format(repo_name=backend.repo.repo_name)
43
43
44 # Patch remote repo to raise an exception instead of making a RPC.
44 # Patch remote repo to raise an exception instead of making a RPC.
45 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
45 with mock.patch.object(RemoteRepo, '__getattr__') as remote_mock:
46 remote_mock.side_effect = VCSCommunicationError()
46 remote_mock.side_effect = VCSCommunicationError()
47
47
48 response = app.get(url, expect_errors=True)
48 response = app.get(url, expect_errors=True)
49
49
50 assert response.status_code == 502
50 assert response.status_code == 502
51 assert 'Could not connect to VCS Server' in response.body
51 assert 'Could not connect to VCS Server' in response.text
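The two assertion changes in this file (`response.body` → `response.text`) are a typical Python 3 fix: WebTest exposes the raw body as bytes and the decoded body as `text`, and a str-in-bytes membership test raises `TypeError` on Python 3. A tiny illustration of why the old form breaks, using a stand-in value rather than a real response:

body = b'<h1>Summary</h1>'      # like response.body: bytes on Python 3
text = body.decode('utf-8')     # like response.text: the decoded str
try:
    'Summary' in body           # str-in-bytes raises TypeError on Python 3
except TypeError:
    pass
assert 'Summary' in text        # hence the switch to response.text above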
@@ -1,99 +1,99 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from rhodecode.tests.utils import CustomTestApp
21 from rhodecode.tests.utils import CustomTestApp
22 from rhodecode.lib.middleware.utils import wsgi_app_caller_client
22 from rhodecode.lib.middleware.utils import wsgi_app_caller_client
23
23
24 # pylint: disable=protected-access,too-many-public-methods
24 # pylint: disable=protected-access,too-many-public-methods
25
25
26
26
27 BASE_ENVIRON = {
27 BASE_ENVIRON = {
28 'REQUEST_METHOD': 'GET',
28 'REQUEST_METHOD': 'GET',
29 'SERVER_NAME': 'localhost',
29 'SERVER_NAME': 'localhost',
30 'SERVER_PORT': '80',
30 'SERVER_PORT': '80',
31 'SCRIPT_NAME': '',
31 'SCRIPT_NAME': '',
32 'PATH_INFO': '/',
32 'PATH_INFO': '/',
33 'QUERY_STRING': '',
33 'QUERY_STRING': '',
34 'foo.bool_var': True,
34 'foo.bool_var': True,
35 'foo.str_var': 'True',
35 'foo.str_var': 'True',
36 'wsgi.foo': True,
36 'wsgi.foo': True,
37 # Some non-string values. The validator expects to get an iterable as
37 # Some non-string values. The validator expects to get an iterable as
38 # a value.
38 # a value.
39 (42,): '42',
39 (42,): '42',
40 (True,): 'False',
40 (True,): 'False',
41 }
41 }
42
42
43
43
44 def assert_all_values_are_str(environ):
44 def assert_all_values_are_str(environ):
45 """Checks that all values of a dict are str."""
45 """Checks that all values of a dict are str."""
46 for key, value in environ.items():
46 for key, value in environ.items():
47 assert isinstance(value, str), (
47 assert isinstance(value, str), (
48 "Value for key %s: has type %s but 'str' was expected. Value: %s" %
48 "Value for key %s: has type %s but 'str' was expected. Value: %s" %
49 (key, type(value), repr(value)))
49 (key, type(value), repr(value)))
50
50
51
51
52 def assert_all_keys_are_str(environ):
52 def assert_all_keys_are_str(environ):
53 """Checks that all keys of a dict are str."""
53 """Checks that all keys of a dict are str."""
54 for key, value in environ.items():
54 for key, value in environ.items():
55 assert isinstance(key, str), (
55 assert isinstance(key, str), (
56 "Key %s: has type %s but 'str' was expected. " %
56 "Key %s: has type %s but 'str' was expected. " %
57 (repr(key), type(key)))
57 (repr(key), type(key)))
58
58
59
59
60 def assert_no_prefix_in_keys(environ, prefix):
60 def assert_no_prefix_in_keys(environ, prefix):
61 """Checks that no key of the dict starts with the prefix."""
61 """Checks that no key of the dict starts with the prefix."""
62 for key in environ:
62 for key in environ:
63 assert not key.startswith(prefix), 'Key %s should not be present' % key
63 assert not key.startswith(prefix), 'Key %s should not be present' % key
64
64
65
65
66 def test_get_environ():
66 def test_get_environ():
67 clean_environ = wsgi_app_caller_client._get_clean_environ(BASE_ENVIRON)
67 clean_environ = wsgi_app_caller_client._get_clean_environ(BASE_ENVIRON)
68
68
69 assert len(clean_environ) == 7
69 assert len(clean_environ) == 7
70 assert_no_prefix_in_keys(clean_environ, 'wsgi.')
70 assert_no_prefix_in_keys(clean_environ, 'wsgi.')
71 assert_all_keys_are_str(clean_environ)
71 assert_all_keys_are_str(clean_environ)
72 assert_all_values_are_str(clean_environ)
72 assert_all_values_are_str(clean_environ)
73
73
74
74
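`test_get_environ` above encodes the Python 3 expectation that a cleaned WSGI environ contains only native `str` keys and values and no `wsgi.*` entries. A rough sketch of a cleaning step that would satisfy those assertions (illustrative only, not the actual `_get_clean_environ`):

def clean_environ_sketch(environ: dict) -> dict:
    # Keep str->str pairs and drop the wsgi.* namespace, mirroring
    # what the assertions in this module check for.
    return {
        key: value
        for key, value in environ.items()
        if isinstance(key, str)
        and isinstance(value, str)
        and not key.startswith('wsgi.')
    }

print(clean_environ_sketch({'PATH_INFO': '/', 'wsgi.foo': 'x', (42,): '42'}))
# {'PATH_INFO': '/'}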
75 def test_remote_app_caller():
75 def test_remote_app_caller():
76
76
77 class RemoteAppCallerMock(object):
77 class RemoteAppCallerMock(object):
78
78
79 def handle(self, environ, input_data, arg1, arg2,
79 def handle(self, environ, input_data, arg1, arg2,
80 arg3=None, arg4=None, arg5=None):
80 arg3=None, arg4=None, arg5=None):
81 assert ((arg1, arg2, arg3, arg4, arg5) ==
81 assert ((arg1, arg2, arg3, arg4, arg5) ==
82 ('a1', 'a2', 'a3', 'a4', None))
82 ('a1', 'a2', 'a3', 'a4', None))
83 # Note: RemoteAppCaller is expected to return a tuple like the
83 # Note: RemoteAppCaller is expected to return a tuple like the
84 # following one
84 # following one
85 return (['content'], '200 OK', [('Content-Type', 'text/plain')])
85 return (['content'], '200 OK', [('Content-Type', 'text/plain')])
86
86
87 wrapper_app = wsgi_app_caller_client.RemoteAppCaller(
87 wrapper_app = wsgi_app_caller_client.RemoteAppCaller(
88 RemoteAppCallerMock(), 'a1', 'a2', arg3='a3', arg4='a4')
88 RemoteAppCallerMock(), 'a1', 'a2', arg3='a3', arg4='a4')
89
89
90 test_app = CustomTestApp(wrapper_app)
90 test_app = CustomTestApp(wrapper_app)
91
91
92 response = test_app.get('/path')
92 response = test_app.get('/path')
93
93
94 assert response.status == '200 OK'
94 assert response.status == '200 OK'
95 assert sorted(response.headers.items()) == sorted([
95 assert sorted(response.headers.items()) == sorted([
96 ('Content-Type', 'text/plain'),
96 ('Content-Type', 'text/plain'),
97 ('Content-Length', '7'),
97 ('Content-Length', '7'),
98 ])
98 ])
99 assert response.body == 'content'
99 assert response.text == 'content'
@@ -1,133 +1,136 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.encrypt import (
23 from rhodecode.lib.encrypt import (
24 AESCipher, SignatureVerificationError, InvalidDecryptedValue)
24 AESCipher, InvalidDecryptedValue)
25 from rhodecode.lib.encrypt2 import (Encryptor, InvalidToken)
25 from rhodecode.lib import enc_utils
26 from rhodecode.lib.str_utils import safe_str
27 from rhodecode.lib.exceptions import SignatureVerificationError
28
29
30 @pytest.mark.parametrize(
31 "algo", ['fernet', 'aes'],
32 )
33 @pytest.mark.parametrize(
34 "key, text",
35 [
36 (b'a', 'short'),
37 (b'a' * 64, 'too long(trimmed to 32)'),
38 (b'a' * 32, 'just enough'),
39 ('Δ…Δ‡Δ™Δ‡Δ™', 'non asci'),
40 ('$asa$asa', 'special $ used'),
41 ]
42 )
43 @pytest.mark.parametrize(
44 "strict_mode", [True, False],
45 )
46 def test_common_encryption_module(algo, key, text, strict_mode):
47 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
48 decrypted = enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=strict_mode)
49 assert text == safe_str(decrypted)
50
51
52 @pytest.mark.parametrize(
53 "algo", ['fernet', 'aes'],
54 )
55 def test_encryption_with_bad_key(algo):
56 key = b'secretstring'
57 text = b'ihatemysql'
58
59 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
60 decrypted = enc_utils.decrypt_value(encrypted, algo=algo, enc_key=b'different-key', strict_mode=False)
61
62 assert decrypted[:22] == '<InvalidDecryptedValue'
63
64
65 @pytest.mark.parametrize(
66 "algo", ['fernet', 'aes'],
67 )
68 def test_encryption_with_bad_key_raises(algo):
69 key = b'secretstring'
70 text = b'ihatemysql'
71 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
72
73 with pytest.raises(SignatureVerificationError) as e:
74 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=b'different-key', strict_mode=True)
75
76 assert 'InvalidDecryptedValue' in str(e)
26
77
27
78
28 class TestEncryptModule(object):
79 @pytest.mark.parametrize(
29
80 "algo", ['fernet', 'aes'],
30 @pytest.mark.parametrize(
81 )
31 "key, text",
82 def test_encryption_with_bad_format_data(algo):
32 [
83 key = b'secret'
33 ('a', 'short'),
34 ('a'*64, 'too long(trimmed to 32)'),
35 ('a'*32, 'just enough'),
36 ('Δ…Δ‡Δ™Δ‡Δ™', 'non asci'),
37 ('$asa$asa', 'special $ used'),
38 ]
39 )
40 def test_encryption(self, key, text):
41 enc = AESCipher(key).encrypt(text)
42 assert AESCipher(key).decrypt(enc) == text
43
44 def test_encryption_with_hmac(self):
45 key = 'secret'
46 text = 'ihatemysql'
47 enc = AESCipher(key, hmac=True).encrypt(text)
48 assert AESCipher(key, hmac=True).decrypt(enc) == text
49
50 def test_encryption_with_hmac_with_bad_key(self):
51 key = 'secretstring'
52 text = 'ihatemysql'
53 enc = AESCipher(key, hmac=True).encrypt(text)
54
55 with pytest.raises(SignatureVerificationError) as e:
56 assert AESCipher('differentsecret', hmac=True).decrypt(enc) == ''
57
58 assert 'Encryption signature verification failed' in str(e)
59
60 def test_encryption_with_hmac_with_bad_data(self):
61 key = 'secret'
62 text = 'ihatemysql'
63 enc = AESCipher(key, hmac=True).encrypt(text)
64 enc = 'xyz' + enc[3:]
65 with pytest.raises(SignatureVerificationError) as e:
66 assert AESCipher(key, hmac=True).decrypt(enc) == text
67
68 assert 'Encryption signature verification failed' in str(e)
69
70 def test_encryption_with_hmac_with_bad_key_not_strict(self):
71 key = 'secretstring'
72 text = 'ihatemysql'
73 enc = AESCipher(key, hmac=True).encrypt(text)
74
75 assert isinstance(AESCipher(
76 'differentsecret', hmac=True, strict_verification=False
77 ).decrypt(enc), InvalidDecryptedValue)
78
79
80 class TestEncryptModule2(object):
81
82 @pytest.mark.parametrize(
83 "key, text",
84 [
85 ('a', 'short'),
86 ('a'*64, 'too long(trimmed to 32)'),
87 ('a'*32, 'just enough'),
88 ('Δ…Δ‡Δ™Δ‡Δ™', 'non asci'),
89 ('$asa$asa', 'special $ used'),
90 ]
91 )
92 def test_encryption(self, key, text):
93 enc = Encryptor(key).encrypt(text)
94 assert Encryptor(key).decrypt(enc) == text
95
96 def test_encryption_with_bad_key(self):
97 key = 'secretstring'
98 text = 'ihatemysql'
99 enc = Encryptor(key).encrypt(text)
100
101 assert Encryptor('differentsecret').decrypt(enc) == ''
102
103 def test_encryption_with_bad_key_raises(self):
104 key = 'secretstring'
105 text = 'ihatemysql'
106 enc = Encryptor(key).encrypt(text)
107
108 with pytest.raises(InvalidToken) as e:
109 Encryptor('differentsecret').decrypt(enc, safe=False)
110
111 assert 'InvalidToken' in str(e)
112
113 def test_encryption_with_bad_format_data(self):
114 key = 'secret'
115 text = 'ihatemysql'
116 enc = Encryptor(key).encrypt(text)
117 enc = '$xyz' + enc[3:]
118
119 with pytest.raises(ValueError) as e:
120 Encryptor(key).decrypt(enc, safe=False)
121
122 assert 'Encrypted Data has invalid format' in str(e)
123
124 def test_encryption_with_bad_data(self):
125 key = 'secret'
126 text = 'ihatemysql'
127 enc = Encryptor(key).encrypt(text)
128 enc = enc[:-5]
129
130 with pytest.raises(InvalidToken) as e:
131 Encryptor(key).decrypt(enc, safe=False)
132
133 assert 'InvalidToken' in str(e)
84 text = b'ihatemysql'
85 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
86 encrypted = b'$xyz' + encrypted[3:]
87
88 with pytest.raises(ValueError) as e:
89 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=True)
90
91 assert 'Encrypted Data has invalid format' in str(e)
92
93
94 @pytest.mark.parametrize(
95 "algo", ['fernet', 'aes'],
96 )
97 def test_encryption_with_bad_data(algo):
98 key = b'secret'
99 text = b'ihatemysql'
100 encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)
101 encrypted = encrypted[:-5]
102
103 with pytest.raises(SignatureVerificationError) as e:
104 enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=True)
105
106 assert 'SignatureVerificationError' in str(e)
107
108
109 def test_encryption_with_hmac():
110 key = b'secret'
111 text = b'ihatemysql'
112 enc = AESCipher(key, hmac=True).encrypt(text)
113 assert AESCipher(key, hmac=True).decrypt(enc) == text
114
115
116 def test_encryption_with_hmac_with_bad_data():
117 key = b'secret'
118 text = b'ihatemysql'
119 enc = AESCipher(key, hmac=True).encrypt(text)
120 enc = b'xyz' + enc[3:]
121 with pytest.raises(SignatureVerificationError) as e:
122 assert AESCipher(key, hmac=True).decrypt(enc, safe=False) == text
123
124 assert 'SignatureVerificationError' in str(e)
125
126
127 def test_encryption_with_hmac_with_bad_key_not_strict():
128 key = b'secretstring'
129 text = b'ihatemysql'
130 enc = AESCipher(key, hmac=True).encrypt(text)
131
132 decrypted = AESCipher(
133 b'differentsecret', hmac=True, strict_verification=False
134 ).decrypt(enc)
135
136 assert isinstance(decrypted, InvalidDecryptedValue)
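The added tests above go through a small helper API instead of the cipher classes directly: enc_utils.encrypt_value() and enc_utils.decrypt_value() take an algo ('fernet' or 'aes'), an enc_key, and a strict_mode flag under which tampered data raises (SignatureVerificationError for a truncated payload, ValueError for a mangled format marker). A minimal sketch of that round-trip as the tests exercise it; the import paths below are assumptions, since the file header is not part of this hunk:

import pytest

from rhodecode.lib import enc_utils  # assumed import path
from rhodecode.lib.exceptions import SignatureVerificationError  # assumed import path


@pytest.mark.parametrize("algo", ['fernet', 'aes'])
def test_roundtrip_and_tamper_detection(algo):
    key, text = b'secret', b'ihatemysql'
    encrypted = enc_utils.encrypt_value(text, algo=algo, enc_key=key)

    # untampered data is expected to decrypt back to the original bytes
    assert enc_utils.decrypt_value(encrypted, algo=algo, enc_key=key, strict_mode=True) == text

    # a truncated payload should fail signature verification in strict mode
    with pytest.raises(SignatureVerificationError):
        enc_utils.decrypt_value(encrypted[:-5], algo=algo, enc_key=key, strict_mode=True)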
@@ -1,463 +1,463 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module to test the performance of pull, push and clone operations.
22 Module to test the performance of pull, push and clone operations.
23
23
24 It works by replaying a group of commits to the repo.
24 It works by replaying a group of commits to the repo.
25 """
25 """
26
26
27 import argparse
27 import argparse
28 import collections
28 import collections
29 import ConfigParser
29 import ConfigParser
30 import functools
30 import functools
31 import itertools
31 import itertools
32 import os
32 import os
33 import pprint
33 import pprint
34 import shutil
34 import shutil
35 import subprocess
35 import subprocess
36 import sys
36 import sys
37 import time
37 import time
38
38
39 import api
39 import api
40
40
41
41
42 def mean(container):
42 def mean(container):
43 """Return the mean of the container."""
43 """Return the mean of the container."""
44 if not container:
44 if not container:
45 return -1.0
45 return -1.0
46 return sum(container) / len(container)
46 return sum(container) / len(container)
47
47
48
48
49 def keep_cwd(f):
49 def keep_cwd(f):
50 """Decorator that keeps track of the starting working directory."""
50 """Decorator that keeps track of the starting working directory."""
51 @functools.wraps(f)
51 @functools.wraps(f)
52 def wrapped_f(*args, **kwargs):
52 def wrapped_f(*args, **kwargs):
53 cur_dir = os.getcwd()
53 cur_dir = os.getcwd()
54 try:
54 try:
55 return f(*args, **kwargs)
55 return f(*args, **kwargs)
56 finally:
56 finally:
57 os.chdir(cur_dir)
57 os.chdir(cur_dir)
58
58
59 return wrapped_f
59 return wrapped_f
60
60
61
61
62 def timed(f):
62 def timed(f):
63 """Decorator that returns the time it took to execute the function."""
63 """Decorator that returns the time it took to execute the function."""
64 @functools.wraps(f)
64 @functools.wraps(f)
65 def wrapped_f(*args, **kwargs):
65 def wrapped_f(*args, **kwargs):
66 start_time = time.time()
66 start_time = time.time()
67 try:
67 try:
68 f(*args, **kwargs)
68 f(*args, **kwargs)
69 finally:
69 finally:
70 return time.time() - start_time
70 return time.time() - start_time
71
71
72 return wrapped_f
72 return wrapped_f
73
73
74
74
75 def execute(*popenargs, **kwargs):
75 def execute(*popenargs, **kwargs):
76 """Extension of subprocess.check_output to support writing to stdin."""
76 """Extension of subprocess.check_output to support writing to stdin."""
77 input = kwargs.pop('stdin', None)
77 input = kwargs.pop('stdin', None)
78 stdin = None
78 stdin = None
79 if input:
79 if input:
80 stdin = subprocess.PIPE
80 stdin = subprocess.PIPE
81 #if 'stderr' not in kwargs:
81 #if 'stderr' not in kwargs:
82 # kwargs['stderr'] = subprocess.PIPE
82 # kwargs['stderr'] = subprocess.PIPE
83 if 'stdout' in kwargs:
83 if 'stdout' in kwargs:
84 raise ValueError('stdout argument not allowed, it will be overridden.')
84 raise ValueError('stdout argument not allowed, it will be overridden.')
85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
85 process = subprocess.Popen(stdin=stdin, stdout=subprocess.PIPE,
86 *popenargs, **kwargs)
86 *popenargs, **kwargs)
87 output, error = process.communicate(input=input)
87 output, error = process.communicate(input=input)
88 retcode = process.poll()
88 retcode = process.poll()
89 if retcode:
89 if retcode:
90 cmd = kwargs.get("args")
90 cmd = kwargs.get("args")
91 if cmd is None:
91 if cmd is None:
92 cmd = popenargs[0]
92 cmd = popenargs[0]
93 print('{} {} {} '.format(cmd, output, error))
93 print('{} {} {} '.format(cmd, output, error))
94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
94 raise subprocess.CalledProcessError(retcode, cmd, output=output)
95 return output
95 return output
96
96
97
97
98 def get_repo_name(repo_url):
98 def get_repo_name(repo_url):
99 """Extract the repo name from its url."""
99 """Extract the repo name from its url."""
100 repo_url = repo_url.rstrip('/')
100 repo_url = repo_url.rstrip('/')
101 return repo_url.split('/')[-1].split('.')[0]
101 return repo_url.split('/')[-1].split('.')[0]
102
102
103
103
104 class TestPerformanceBase(object):
104 class TestPerformanceBase(object):
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
105 def __init__(self, base_dir, repo_url, n_commits, max_commits,
106 skip_commits):
106 skip_commits):
107 self.repo_url = repo_url
107 self.repo_url = repo_url
108 self.repo_name = get_repo_name(self.repo_url)
108 self.repo_name = get_repo_name(self.repo_url)
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
109 self.upstream_repo_name = '%s_upstream' % self.repo_name
110 self.base_dir = os.path.abspath(base_dir)
110 self.base_dir = os.path.abspath(base_dir)
111 self.n_commits = n_commits
111 self.n_commits = n_commits
112 self.max_commits = max_commits
112 self.max_commits = max_commits
113 self.skip_commits = skip_commits
113 self.skip_commits = skip_commits
114 self.push_times = []
114 self.push_times = []
115 self.pull_times = []
115 self.pull_times = []
116 self.empty_pull_times = []
116 self.empty_pull_times = []
117 self.clone_time = -1.0
117 self.clone_time = -1.0
118 self.last_commit = None
118 self.last_commit = None
119
119
120 self.cloned_repo = ''
120 self.cloned_repo = ''
121 self.pull_repo = ''
121 self.pull_repo = ''
122 self.orig_repo = ''
122 self.orig_repo = ''
123
123
124 def run(self):
124 def run(self):
125 try:
125 try:
126 self.test()
126 self.test()
127 except Exception as error:
127 except Exception as error:
128 print(error)
128 print(error)
129 finally:
129 finally:
130 self.cleanup()
130 self.cleanup()
131
131
132 print('Clone time :{}'.format(self.clone_time))
132 print('Clone time :{}'.format(self.clone_time))
133 print('Push time :{}'.format(mean(self.push_times)))
133 print('Push time :{}'.format(mean(self.push_times)))
134 print('Pull time :{}'.format(mean(self.pull_times)))
134 print('Pull time :{}'.format(mean(self.pull_times)))
135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
135 print('Empty pull time:{}'.format(mean(self.empty_pull_times)))
136
136
137 return {
137 return {
138 'clone': self.clone_time,
138 'clone': self.clone_time,
139 'push': mean(self.push_times),
139 'push': mean(self.push_times),
140 'pull': mean(self.pull_times),
140 'pull': mean(self.pull_times),
141 'empty_pull': mean(self.empty_pull_times),
141 'empty_pull': mean(self.empty_pull_times),
142 }
142 }
143
143
144 @keep_cwd
144 @keep_cwd
145 def test(self):
145 def test(self):
146 os.chdir(self.base_dir)
146 os.chdir(self.base_dir)
147
147
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
148 self.orig_repo = os.path.join(self.base_dir, self.repo_name)
149 if not os.path.exists(self.orig_repo):
149 if not os.path.exists(self.orig_repo):
150 self.clone_repo(self.repo_url, default_only=True)
150 self.clone_repo(self.repo_url, default_only=True)
151
151
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
152 upstream_url = self.create_repo(self.upstream_repo_name, self.repo_type)
153
153
154 self.add_remote(self.orig_repo, upstream_url)
154 self.add_remote(self.orig_repo, upstream_url)
155
155
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
156 self.pull_repo = os.path.join(self.base_dir, '%s_pull' % self.repo_name)
157 self.clone_repo(upstream_url, self.pull_repo)
157 self.clone_repo(upstream_url, self.pull_repo)
158
158
159 commits = self.get_commits(self.orig_repo)
159 commits = self.get_commits(self.orig_repo)
160 self.last_commit = commits[-1]
160 self.last_commit = commits[-1]
161 if self.skip_commits:
161 if self.skip_commits:
162 self.push(
162 self.push(
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
163 self.orig_repo, commits[self.skip_commits - 1], 'upstream')
164 commits = commits[self.skip_commits:self.max_commits]
164 commits = commits[self.skip_commits:self.max_commits]
165
165
166 print('Working with %d commits' % len(commits))
166 print('Working with %d commits' % len(commits))
167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
167 for i in range(self.n_commits - 1, len(commits), self.n_commits):
168 commit = commits[i]
168 commit = commits[i]
169 print('Processing commit %s (%d)' % (commit, i + 1))
169 print('Processing commit %s (%d)' % (commit, i + 1))
170 self.push_times.append(
170 self.push_times.append(
171 self.push(self.orig_repo, commit, 'upstream'))
171 self.push(self.orig_repo, commit, 'upstream'))
172 self.check_remote_last_commit_is(commit, upstream_url)
172 self.check_remote_last_commit_is(commit, upstream_url)
173
173
174 self.pull_times.append(self.pull(self.pull_repo))
174 self.pull_times.append(self.pull(self.pull_repo))
175 self.check_local_last_commit_is(commit, self.pull_repo)
175 self.check_local_last_commit_is(commit, self.pull_repo)
176
176
177 self.empty_pull_times.append(self.pull(self.pull_repo))
177 self.empty_pull_times.append(self.pull(self.pull_repo))
178
178
179 self.cloned_repo = os.path.join(self.base_dir,
179 self.cloned_repo = os.path.join(self.base_dir,
180 '%s_clone' % self.repo_name)
180 '%s_clone' % self.repo_name)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
181 self.clone_time = self.clone_repo(upstream_url, self.cloned_repo)
182
182
183 def cleanup(self):
183 def cleanup(self):
184 try:
184 try:
185 self.delete_repo(self.upstream_repo_name)
185 self.delete_repo(self.upstream_repo_name)
186 except api.ApiError:
186 except api.ApiError:
187 # Continue in case we could not delete the repo. Maybe we did not
187 # Continue in case we could not delete the repo. Maybe we did not
188 # create it in the first place.
188 # create it in the first place.
189 pass
189 pass
190
190
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
191 shutil.rmtree(self.pull_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
192 shutil.rmtree(self.cloned_repo, ignore_errors=True)
193
193
194 if os.path.exists(self.orig_repo):
194 if os.path.exists(self.orig_repo):
195 self.remove_remote(self.orig_repo)
195 self.remove_remote(self.orig_repo)
196
196
197
197
198 class RhodeCodeMixin(object):
198 class RhodeCodeMixin(object):
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
199 """Mixin providing the methods to create and delete repos in RhodeCode."""
200 def __init__(self, api_key):
200 def __init__(self, api_key):
201 self.api = api.RCApi(api_key=api_key)
201 self.api = api.RCApi(api_key=api_key)
202
202
203 def create_repo(self, repo_name, repo_type):
203 def create_repo(self, repo_name, repo_type):
204 return self.api.create_repo(repo_name, repo_type,
204 return self.api.create_repo(repo_name, repo_type,
205 'Repo for performance testing')
205 'Repo for performance testing')
206
206
207 def delete_repo(self, repo_name):
207 def delete_repo(self, repo_name):
208 return self.api.delete_repo(repo_name)
208 return self.api.delete_repo(repo_name)
209
209
210
210
211 class GitMixin(object):
211 class GitMixin(object):
212 """Mixin providing the git operations."""
212 """Mixin providing the git operations."""
213 @timed
213 @timed
214 def clone_repo(self, repo_url, destination=None, default_only=False):
214 def clone_repo(self, repo_url, destination=None, default_only=False):
215 args = ['git', 'clone']
215 args = ['git', 'clone']
216 if default_only:
216 if default_only:
217 args.extend(['--branch', 'master', '--single-branch'])
217 args.extend(['--branch', 'master', '--single-branch'])
218 args.append(repo_url)
218 args.append(repo_url)
219 if destination:
219 if destination:
220 args.append(destination)
220 args.append(destination)
221 execute(args)
221 execute(args)
222
222
223 @keep_cwd
223 @keep_cwd
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
224 def add_remote(self, repo, remote_url, remote_name='upstream'):
225 self.remove_remote(repo, remote_name)
225 self.remove_remote(repo, remote_name)
226 os.chdir(repo)
226 os.chdir(repo)
227 execute(['git', 'remote', 'add', remote_name, remote_url])
227 execute(['git', 'remote', 'add', remote_name, remote_url])
228
228
229 @keep_cwd
229 @keep_cwd
230 def remove_remote(self, repo, remote_name='upstream'):
230 def remove_remote(self, repo, remote_name='upstream'):
231 os.chdir(repo)
231 os.chdir(repo)
232 remotes = execute(['git', 'remote']).split('\n')
232 remotes = execute(['git', 'remote']).split('\n')
233 if remote_name in remotes:
233 if remote_name in remotes:
234 execute(['git', 'remote', 'remove', remote_name])
234 execute(['git', 'remote', 'remove', remote_name])
235
235
236 @keep_cwd
236 @keep_cwd
237 def get_commits(self, repo, branch='master'):
237 def get_commits(self, repo, branch='master'):
238 os.chdir(repo)
238 os.chdir(repo)
239 commits_list = execute(
239 commits_list = execute(
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
240 ['git', 'log', '--first-parent', branch, '--pretty=%H'])
241 return commits_list.strip().split('\n')[::-1]
241 return commits_list.strip().split('\n')[::-1]
242
242
243 @timed
243 @timed
244 def push(self, repo, commit, remote_name=None):
244 def push(self, repo, commit, remote_name=None):
245 os.chdir(repo)
245 os.chdir(repo)
246 try:
246 try:
247 execute(['git', 'reset', '--soft', commit])
247 execute(['git', 'reset', '--soft', commit])
248 args = ['git', 'push']
248 args = ['git', 'push']
249 if remote_name:
249 if remote_name:
250 args.append(remote_name)
250 args.append(remote_name)
251 execute(args)
251 execute(args)
252 finally:
252 finally:
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
253 execute(['git', 'reset', '--soft', 'HEAD@{1}'])
254
254
255 @timed
255 @timed
256 def pull(self, repo):
256 def pull(self, repo):
257 os.chdir(repo)
257 os.chdir(repo)
258 execute(['git', 'pull'])
258 execute(['git', 'pull'])
259
259
260 def _remote_last_commit(self, repo_url):
260 def _remote_last_commit(self, repo_url):
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
261 output = execute(['git', 'ls-remote', repo_url, 'HEAD'])
262 return output.split()[0]
262 return output.split()[0]
263
263
264 def check_remote_last_commit_is(self, commit, repo_url):
264 def check_remote_last_commit_is(self, commit, repo_url):
265 last_remote_commit = self._remote_last_commit(repo_url)
265 last_remote_commit = self._remote_last_commit(repo_url)
266 if last_remote_commit != commit:
266 if last_remote_commit != commit:
267 raise Exception('Push did not work, expected commit %s but got %s' %
267 raise Exception('Push did not work, expected commit %s but got %s' %
268 (commit, last_remote_commit))
268 (commit, last_remote_commit))
269
269
270 @keep_cwd
270 @keep_cwd
271 def _local_last_commit(self, repo):
271 def _local_last_commit(self, repo):
272 os.chdir(repo)
272 os.chdir(repo)
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
273 return execute(['git', 'rev-parse', 'HEAD']).strip()
274
274
275 def check_local_last_commit_is(self, commit, repo):
275 def check_local_last_commit_is(self, commit, repo):
276 last_local_commit = self._local_last_commit(repo)
276 last_local_commit = self._local_last_commit(repo)
277 if last_local_commit != commit:
277 if last_local_commit != commit:
278 raise Exception('Pull did not work, expected commit %s but got %s' %
278 raise Exception('Pull did not work, expected commit %s but got %s' %
279 (commit, last_local_commit))
279 (commit, last_local_commit))
280
280
281
281
282 class HgMixin(object):
282 class HgMixin(object):
283 """Mixin providing the mercurial operations."""
283 """Mixin providing the mercurial operations."""
284 @timed
284 @timed
285 def clone_repo(self, repo_url, destination=None, default_only=False):
285 def clone_repo(self, repo_url, destination=None, default_only=False):
286 args = ['hg', 'clone']
286 args = ['hg', 'clone']
287 if default_only:
287 if default_only:
288 args.extend(['--branch', 'default'])
288 args.extend(['--branch', 'default'])
289 args.append(repo_url)
289 args.append(repo_url)
290 if destination:
290 if destination:
291 args.append(destination)
291 args.append(destination)
292 execute(args)
292 execute(args)
293
293
294 @keep_cwd
294 @keep_cwd
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
295 def add_remote(self, repo, remote_url, remote_name='upstream'):
296 self.remove_remote(repo, remote_name)
296 self.remove_remote(repo, remote_name)
297 os.chdir(repo)
297 os.chdir(repo)
298 hgrc = ConfigParser.RawConfigParser()
298 hgrc = ConfigParser.RawConfigParser()
299 hgrc.read('.hg/hgrc')
299 hgrc.read('.hg/hgrc')
300 hgrc.set('paths', remote_name, remote_url)
300 hgrc.set('paths', remote_name, remote_url)
301 with open('.hg/hgrc', 'w') as f:
301 with open('.hg/hgrc', 'w') as f:
302 hgrc.write(f)
302 hgrc.write(f)
303
303
304 @keep_cwd
304 @keep_cwd
305 def remove_remote(self, repo, remote_name='upstream'):
305 def remove_remote(self, repo, remote_name='upstream'):
306 os.chdir(repo)
306 os.chdir(repo)
307 hgrc = ConfigParser.RawConfigParser()
307 hgrc = ConfigParser.RawConfigParser()
308 hgrc.read('.hg/hgrc')
308 hgrc.read('.hg/hgrc')
309 hgrc.remove_option('paths', remote_name)
309 hgrc.remove_option('paths', remote_name)
310 with open('.hg/hgrc', 'w') as f:
310 with open('.hg/hgrc', 'w') as f:
311 hgrc.write(f)
311 hgrc.write(f)
312
312
313 @keep_cwd
313 @keep_cwd
314 def get_commits(self, repo, branch='default'):
314 def get_commits(self, repo, branch='default'):
315 os.chdir(repo)
315 os.chdir(repo)
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
316 # See http://stackoverflow.com/questions/15376649/is-there-a-mercurial-equivalent-to-git-log-first-parent
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
317 commits_list = execute(['hg', 'log', '--branch', branch, '--template',
318 '{node}\n', '--follow-first'])
318 '{node}\n', '--follow-first'])
319 return commits_list.strip().split('\n')[::-1]
319 return commits_list.strip().split('\n')[::-1]
320
320
321 @timed
321 @timed
322 def push(self, repo, commit, remote_name=None):
322 def push(self, repo, commit, remote_name=None):
323 os.chdir(repo)
323 os.chdir(repo)
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
324 args = ['hg', 'push', '--rev', commit, '--new-branch']
325 if remote_name:
325 if remote_name:
326 args.append(remote_name)
326 args.append(remote_name)
327 execute(args)
327 execute(args)
328
328
329 @timed
329 @timed
330 def pull(self, repo):
330 def pull(self, repo):
331 os.chdir(repo)
331 os.chdir(repo)
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
332 execute(['hg', '--config', 'alias.pull=pull', 'pull', '-u'])
333
333
334 def _remote_last_commit(self, repo_url):
334 def _remote_last_commit(self, repo_url):
335 return execute(['hg', 'identify', repo_url])[:12]
335 return execute(['hg', 'identify', repo_url])[:12]
336
336
337 def check_remote_last_commit_is(self, commit, repo_url):
337 def check_remote_last_commit_is(self, commit, repo_url):
338 last_remote_commit = self._remote_last_commit(repo_url)
338 last_remote_commit = self._remote_last_commit(repo_url)
339 if not commit.startswith(last_remote_commit):
339 if not commit.startswith(last_remote_commit):
340 raise Exception('Push did not work, expected commit %s but got %s' %
340 raise Exception('Push did not work, expected commit %s but got %s' %
341 (commit, last_remote_commit))
341 (commit, last_remote_commit))
342
342
343 @keep_cwd
343 @keep_cwd
344 def _local_last_commit(self, repo):
344 def _local_last_commit(self, repo):
345 os.chdir(repo)
345 os.chdir(repo)
346 return execute(['hg', 'identify'])[:12]
346 return execute(['hg', 'identify'])[:12]
347
347
348 def check_local_last_commit_is(self, commit, repo):
348 def check_local_last_commit_is(self, commit, repo):
349 last_local_commit = self._local_last_commit(repo)
349 last_local_commit = self._local_last_commit(repo)
350 if not commit.startswith(last_local_commit):
350 if not commit.startswith(last_local_commit):
351 raise Exception('Pull did not work, expected commit %s but got %s' %
351 raise Exception('Pull did not work, expected commit %s but got %s' %
352 (commit, last_local_commit))
352 (commit, last_local_commit))
353
353
354
354
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
355 class GitTestPerformance(GitMixin, RhodeCodeMixin, TestPerformanceBase):
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
356 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
357 api_key):
357 api_key):
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
358 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
359 max_commits, skip_commits)
359 max_commits, skip_commits)
360 RhodeCodeMixin.__init__(self, api_key)
360 RhodeCodeMixin.__init__(self, api_key)
361 self.repo_type = 'git'
361 self.repo_type = 'git'
362
362
363
363
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
364 class HgTestPerformance(HgMixin, RhodeCodeMixin, TestPerformanceBase):
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
365 def __init__(self, base_dir, repo_url, n_commits, max_commits, skip_commits,
366 api_key):
366 api_key):
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
367 TestPerformanceBase.__init__(self, base_dir, repo_url, n_commits,
368 max_commits, skip_commits)
368 max_commits, skip_commits)
369 RhodeCodeMixin.__init__(self, api_key)
369 RhodeCodeMixin.__init__(self, api_key)
370 self.repo_type = 'hg'
370 self.repo_type = 'hg'
371
371
372
372
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
373 def get_test(base_dir, repo_url, repo_type, step, max_commits, skip_commits,
374 api_key):
374 api_key):
375 max_commits = min(10 * step,
375 max_commits = min(10 * step,
376 int((max_commits - skip_commits) / step) * step)
376 int((max_commits - skip_commits) / step) * step)
377 max_commits += skip_commits
377 max_commits += skip_commits
378 if repo_type == 'git':
378 if repo_type == 'git':
379 return GitTestPerformance(
379 return GitTestPerformance(
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
380 base_dir, repo_url, step, max_commits, skip_commits, api_key)
381 elif repo_type == 'hg':
381 elif repo_type == 'hg':
382 return HgTestPerformance(
382 return HgTestPerformance(
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
383 base_dir, repo_url, step, max_commits, skip_commits, api_key)
384
384
385
385
386 def main(argv):
386 def main(argv):
387 parser = argparse.ArgumentParser(
387 parser = argparse.ArgumentParser(
388 description='Performance tests for push/pull/clone for git and ' +
388 description='Performance tests for push/pull/clone for git and ' +
389 'mercurial repos.')
389 'mercurial repos.')
390 parser.add_argument(
390 parser.add_argument(
391 '--tests', dest='tests', action='store', required=False, default='all',
391 '--tests', dest='tests', action='store', required=False, default='all',
392 help='The tests to run. Default: all. But could be any comma ' +
392 help='The tests to run. Default: all. But could be any comma ' +
393 'separated list with python, hg, kernel or git')
393 'separated list with python, hg, kernel or git')
394 parser.add_argument(
394 parser.add_argument(
395 '--sizes', dest='sizes', action='store', required=False,
395 '--sizes', dest='sizes', action='store', required=False,
396 default='1,10,100,1000,2500',
396 default='1,10,100,1000,2500',
397 help='The sizes to use. Default: 1,10,100,1000,2500')
397 help='The sizes to use. Default: 1,10,100,1000,2500')
398 parser.add_argument(
398 parser.add_argument(
399 '--dir', dest='dir', action='store', required=True,
399 '--dir', dest='dir', action='store', required=True,
400 help='The dir where to store the repos')
400 help='The dir where to store the repos')
401 parser.add_argument(
401 parser.add_argument(
402 '--api-key', dest='api_key', action='store', required=True,
402 '--api-key', dest='api_key', action='store', required=True,
403 help='The api key of RhodeCode')
403 help='The api key of RhodeCode')
404 options = parser.parse_args(argv[1:])
404 options = parser.parse_args(argv[1:])
405 print(options)
405 print(options)
406
406
407 test_config = {
407 test_config = {
408 'python': {
408 'python': {
409 'url': 'https://hg.python.org/cpython/',
409 'url': 'https://hg.python.org/cpython/',
410 'limit': 23322,
410 'limit': 23322,
411 'type': 'hg',
411 'type': 'hg',
412 # Do not time the first commit, as it is HUGE!
412 # Do not time the first commit, as it is HUGE!
413 'skip': 1,
413 'skip': 1,
414 },
414 },
415 'hg': {
415 'hg': {
416 'url': 'http://selenic.com/hg',
416 'url': 'http://selenic.com/hg',
417 'limit': 14396,
417 'limit': 14396,
418 'type': 'hg',
418 'type': 'hg',
419 },
419 },
420 'kernel': {
420 'kernel': {
421 'url': 'https://github.com/torvalds/linux.git',
421 'url': 'https://github.com/torvalds/linux.git',
422 'limit': 46271,
422 'limit': 46271,
423 'type': 'git',
423 'type': 'git',
424 },
424 },
425 'git': {
425 'git': {
426 'url': 'https://github.com/git/git.git',
426 'url': 'https://github.com/git/git.git',
427 'limit': 13525,
427 'limit': 13525,
428 'type': 'git',
428 'type': 'git',
429 }
429 }
430
430
431 }
431 }
432
432
433 test_names = options.tests.split(',')
433 test_names = options.tests.split(',')
434 if test_names == ['all']:
434 if test_names == ['all']:
435 test_names = test_config.keys()
435 test_names = test_config.keys()
436 if not set(test_names) <= set(test_config.keys()):
436 if not set(test_names) <= set(test_config.keys()):
437 print('Invalid tests: only %s are valid but specified %s' %
437 print('Invalid tests: only %s are valid but specified %s' %
438 (test_config.keys(), test_names))
438 (test_config.keys(), test_names))
439 return 1
439 return 1
440
440
441 sizes = options.sizes.split(',')
441 sizes = options.sizes.split(',')
442 sizes = map(int, sizes)
442 sizes = list(map(int, sizes))
443
443
444 base_dir = options.dir
444 base_dir = options.dir
445 api_key = options.api_key
445 api_key = options.api_key
446 results = collections.defaultdict(dict)
446 results = collections.defaultdict(dict)
447 for test_name, size in itertools.product(test_names, sizes):
447 for test_name, size in itertools.product(test_names, sizes):
448 test = get_test(base_dir,
448 test = get_test(base_dir,
449 test_config[test_name]['url'],
449 test_config[test_name]['url'],
450 test_config[test_name]['type'],
450 test_config[test_name]['type'],
451 size,
451 size,
452 test_config[test_name]['limit'],
452 test_config[test_name]['limit'],
453 test_config[test_name].get('skip', 0),
453 test_config[test_name].get('skip', 0),
454 api_key)
454 api_key)
455 print('*' * 80)
455 print('*' * 80)
456 print('Running performance test: %s with size %d' % (test_name, size))
456 print('Running performance test: %s with size %d' % (test_name, size))
457 print('*' * 80)
457 print('*' * 80)
458 results[test_name][size] = test.run()
458 results[test_name][size] = test.run()
459 pprint.pprint(dict(results))
459 pprint.pprint(dict(results))
460
460
461
461
462 if __name__ == '__main__':
462 if __name__ == '__main__':
463 sys.exit(main(sys.argv))
463 sys.exit(main(sys.argv))
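The performance script above measures each VCS operation with two small decorators: keep_cwd restores the working directory after the call, and timed discards the function's own return value and reports the elapsed wall-clock seconds instead, which is why the results of push, pull and clone_repo can be appended straight onto the timing lists. A minimal, self-contained sketch of that pattern, independent of the RhodeCode helpers:

import functools
import os
import time


def keep_cwd(f):
    """Run f, then always restore the original working directory."""
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        cur_dir = os.getcwd()
        try:
            return f(*args, **kwargs)
        finally:
            os.chdir(cur_dir)
    return wrapped


def timed(f):
    """Return the wall-clock seconds f took instead of f's own result."""
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        start = time.time()
        try:
            f(*args, **kwargs)
        finally:
            # returning from finally also swallows exceptions raised by f,
            # mirroring the behaviour of the script above
            return time.time() - start
    return wrapped


@timed
def sample_operation():
    time.sleep(0.1)


if __name__ == '__main__':
    print('elapsed: %.3fs' % sample_operation())  # prints roughly 0.1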
@@ -1,472 +1,486 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess
25 import subprocess
26 import tempfile
26 import tempfile
27 import urllib.request, urllib.error, urllib.parse
27 import urllib.request
28 import urllib.error
29 import urllib.parse
28 from lxml.html import fromstring, tostring
30 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
31 from lxml.cssselect import CSSSelector
30 from urllib.parse import unquote_plus
32 from urllib.parse import unquote_plus
31 import webob
33 import webob
32
34
33 from webtest.app import TestResponse, TestApp
35 from webtest.app import TestResponse, TestApp
34 from webtest.compat import print_stderr
36 from webtest.compat import print_stderr
35
37
36 import pytest
38 import pytest
37
39
38 try:
40 try:
39 import rc_testdata
41 import rc_testdata
40 except ImportError:
42 except ImportError:
41 raise ImportError('Failed to import rc_testdata, '
43 raise ImportError('Failed to import rc_testdata, '
42 'please make sure this package is installed from requirements_test.txt')
44 'please make sure this package is installed from requirements_test.txt')
43
45
44 from rhodecode.model.db import User, Repository
46 from rhodecode.model.db import User, Repository
45 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
46 from rhodecode.model.scm import ScmModel
48 from rhodecode.model.scm import ScmModel
47 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
49 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 from rhodecode.lib.vcs.backends.base import EmptyCommit
49 from rhodecode.tests import login_user_session
51 from rhodecode.tests import login_user_session
50
52
51 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
52
54
53
55
54 class CustomTestResponse(TestResponse):
56 class CustomTestResponse(TestResponse):
55
57
56 def _save_output(self, out):
58 def _save_output(self, out):
57 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
59 f = tempfile.NamedTemporaryFile(mode='w', delete=False, prefix='rc-test-', suffix='.html')
58 f.write(out)
60 f.write(out)
59 return f.name
61 return f.name
60
62
61 def mustcontain(self, *strings, **kw):
63 def mustcontain(self, *strings, **kw):
62 """
64 """
63 Assert that the response contains all of the strings passed
65 Assert that the response contains all of the strings passed
64 in as arguments.
66 in as arguments.
65
67
66 Equivalent to::
68 Equivalent to::
67
69
68 assert string in res
70 assert string in res
69 """
71 """
70 print_body = kw.pop('print_body', False)
72 print_body = kw.pop('print_body', False)
71 if 'no' in kw:
73 if 'no' in kw:
72 no = kw['no']
74 no = kw['no']
73 del kw['no']
75 del kw['no']
74 if isinstance(no, str):
76 if isinstance(no, str):
75 no = [no]
77 no = [no]
76 else:
78 else:
77 no = []
79 no = []
78 if kw:
80 if kw:
79 raise TypeError(
81 raise TypeError(
80 "The only keyword argument allowed is 'no' got %s" % kw)
82 "The only keyword argument allowed is 'no' got %s" % kw)
81
83
82 f = self._save_output(str(self))
84 f = self._save_output(str(self))
83
85
84 for s in strings:
86 for s in strings:
85 if not s in self:
87 if s not in self:
86 print_stderr("Actual response (no %r):" % s)
88 print_stderr("Actual response (no %r):" % s)
87 print_stderr("body output saved as `%s`" % f)
89 print_stderr("body output saved as `%s`" % f)
88 if print_body:
90 if print_body:
89 print_stderr(str(self))
91 print_stderr(str(self))
90 raise IndexError(
92 raise IndexError(
91 "Body does not contain string %r, body output saved as %s" % (s, f))
93 "Body does not contain string %r, body output saved as %s" % (s, f))
92
94
93 for no_s in no:
95 for no_s in no:
94 if no_s in self:
96 if no_s in self:
95 print_stderr("Actual response (has %r)" % no_s)
97 print_stderr("Actual response (has %r)" % no_s)
96 print_stderr("body output saved as `%s`" % f)
98 print_stderr("body output saved as `%s`" % f)
97 if print_body:
99 if print_body:
98 print_stderr(str(self))
100 print_stderr(str(self))
99 raise IndexError(
101 raise IndexError(
100 "Body contains bad string %r, body output saved as %s" % (no_s, f))
102 "Body contains bad string %r, body output saved as %s" % (no_s, f))
101
103
102 def assert_response(self):
104 def assert_response(self):
103 return AssertResponse(self)
105 return AssertResponse(self)
104
106
105 def get_session_from_response(self):
107 def get_session_from_response(self):
106 """
108 """
107 This returns the session from a response object.
109 This returns the session from a response object.
108 """
110 """
109 from rhodecode.lib.rc_beaker import session_factory_from_settings
111 from rhodecode.lib.rc_beaker import session_factory_from_settings
110 session = session_factory_from_settings(self.test_app._pyramid_settings)
112 session = session_factory_from_settings(self.test_app._pyramid_settings)
111 return session(self.request)
113 return session(self.request)
112
114
113
115
114 class TestRequest(webob.BaseRequest):
116 class TestRequest(webob.BaseRequest):
115
117
116 # for py.test
118 # for py.test, so it doesn't try to run this class by name starting with test...
117 disabled = True
119 disabled = True
118 ResponseClass = CustomTestResponse
120 ResponseClass = CustomTestResponse
119
121
120 def add_response_callback(self, callback):
122 def add_response_callback(self, callback):
121 pass
123 pass
122
124
125 @classmethod
126 def blank(cls, path, environ=None, base_url=None,
127 headers=None, POST=None, **kw):
128
129 if not path.isascii():
130 # our custom quote path if it contains non-ascii chars
131 path = urllib.parse.quote(path)
132
133 return super(TestRequest, cls).blank(
134 path, environ=environ, base_url=base_url, headers=headers, POST=POST, **kw)
135
123
136
124 class CustomTestApp(TestApp):
137 class CustomTestApp(TestApp):
125 """
138 """
126 Custom app to make mustcontain more Useful, and extract special methods
139 Custom app to make mustcontain more Useful, and extract special methods
127 """
140 """
128 RequestClass = TestRequest
141 RequestClass = TestRequest
129 rc_login_data = {}
142 rc_login_data = {}
130 rc_current_session = None
143 rc_current_session = None
131
144
132 def login(self, username=None, password=None):
145 def login(self, username=None, password=None):
133 from rhodecode.lib import auth
146 from rhodecode.lib import auth
134
147
135 if username and password:
148 if username and password:
136 session = login_user_session(self, username, password)
149 session = login_user_session(self, username, password)
137 else:
150 else:
138 session = login_user_session(self)
151 session = login_user_session(self)
139
152
140 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
153 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
141 self.rc_current_session = session
154 self.rc_current_session = session
142 return session['rhodecode_user']
155 return session['rhodecode_user']
143
156
144 @property
157 @property
145 def csrf_token(self):
158 def csrf_token(self):
146 return self.rc_login_data['csrf_token']
159 return self.rc_login_data['csrf_token']
147
160
148 @property
161 @property
149 def _pyramid_registry(self):
162 def _pyramid_registry(self):
150 return self.app.config.registry
163 return self.app.config.registry
151
164
152 @property
165 @property
153 def _pyramid_settings(self):
166 def _pyramid_settings(self):
154 return self._pyramid_registry.settings
167 return self._pyramid_registry.settings
155
168
156
169
157 def set_anonymous_access(enabled):
170 def set_anonymous_access(enabled):
158 """(Dis)allows anonymous access depending on parameter `enabled`"""
171 """(Dis)allows anonymous access depending on parameter `enabled`"""
159 user = User.get_default_user()
172 user = User.get_default_user()
160 user.active = enabled
173 user.active = enabled
161 Session().add(user)
174 Session().add(user)
162 Session().commit()
175 Session().commit()
163 time.sleep(1.5) # must sleep for cache (1s to expire)
176 time.sleep(1.5) # must sleep for cache (1s to expire)
164 log.info('anonymous access is now: %s', enabled)
177 log.info('anonymous access is now: %s', enabled)
165 assert enabled == User.get_default_user().active, (
178 assert enabled == User.get_default_user().active, (
166 'Cannot set anonymous access')
179 'Cannot set anonymous access')
167
180
168
181
169 def check_xfail_backends(node, backend_alias):
182 def check_xfail_backends(node, backend_alias):
170 # Using "xfail_backends" here intentionally, since this marks work
183 # Using "xfail_backends" here intentionally, since this marks work
171 # which is "to be done" soon.
184 # which is "to be done" soon.
172 skip_marker = node.get_closest_marker('xfail_backends')
185 skip_marker = node.get_closest_marker('xfail_backends')
173 if skip_marker and backend_alias in skip_marker.args:
186 if skip_marker and backend_alias in skip_marker.args:
174 msg = "Support for backend %s to be developed." % (backend_alias, )
187 msg = "Support for backend %s to be developed." % (backend_alias, )
175 msg = skip_marker.kwargs.get('reason', msg)
188 msg = skip_marker.kwargs.get('reason', msg)
176 pytest.xfail(msg)
189 pytest.xfail(msg)
177
190
178
191
179 def check_skip_backends(node, backend_alias):
192 def check_skip_backends(node, backend_alias):
180 # Using "skip_backends" here intentionally, since this marks work which is
193 # Using "skip_backends" here intentionally, since this marks work which is
181 # not supported.
194 # not supported.
182 skip_marker = node.get_closest_marker('skip_backends')
195 skip_marker = node.get_closest_marker('skip_backends')
183 if skip_marker and backend_alias in skip_marker.args:
196 if skip_marker and backend_alias in skip_marker.args:
184 msg = "Feature not supported for backend %s." % (backend_alias, )
197 msg = "Feature not supported for backend %s." % (backend_alias, )
185 msg = skip_marker.kwargs.get('reason', msg)
198 msg = skip_marker.kwargs.get('reason', msg)
186 pytest.skip(msg)
199 pytest.skip(msg)
187
200
188
201
189 def extract_git_repo_from_dump(dump_name, repo_name):
202 def extract_git_repo_from_dump(dump_name, repo_name):
190 """Create git repo `repo_name` from dump `dump_name`."""
203 """Create git repo `repo_name` from dump `dump_name`."""
191 repos_path = ScmModel().repos_path
204 repos_path = ScmModel().repos_path
192 target_path = os.path.join(repos_path, repo_name)
205 target_path = os.path.join(repos_path, repo_name)
193 rc_testdata.extract_git_dump(dump_name, target_path)
206 rc_testdata.extract_git_dump(dump_name, target_path)
194 return target_path
207 return target_path
195
208
196
209
197 def extract_hg_repo_from_dump(dump_name, repo_name):
210 def extract_hg_repo_from_dump(dump_name, repo_name):
198 """Create hg repo `repo_name` from dump `dump_name`."""
211 """Create hg repo `repo_name` from dump `dump_name`."""
199 repos_path = ScmModel().repos_path
212 repos_path = ScmModel().repos_path
200 target_path = os.path.join(repos_path, repo_name)
213 target_path = os.path.join(repos_path, repo_name)
201 rc_testdata.extract_hg_dump(dump_name, target_path)
214 rc_testdata.extract_hg_dump(dump_name, target_path)
202 return target_path
215 return target_path
203
216
204
217
205 def extract_svn_repo_from_dump(dump_name, repo_name):
218 def extract_svn_repo_from_dump(dump_name, repo_name):
206 """Create a svn repo `repo_name` from dump `dump_name`."""
219 """Create a svn repo `repo_name` from dump `dump_name`."""
207 repos_path = ScmModel().repos_path
220 repos_path = ScmModel().repos_path
208 target_path = os.path.join(repos_path, repo_name)
221 target_path = os.path.join(repos_path, repo_name)
209 SubversionRepository(target_path, create=True)
222 SubversionRepository(target_path, create=True)
210 _load_svn_dump_into_repo(dump_name, target_path)
223 _load_svn_dump_into_repo(dump_name, target_path)
211 return target_path
224 return target_path
212
225
213
226
214 def assert_message_in_log(log_records, message, levelno, module):
227 def assert_message_in_log(log_records, message, levelno, module):
215 messages = [
228 messages = [
216 r.message for r in log_records
229 r.message for r in log_records
217 if r.module == module and r.levelno == levelno
230 if r.module == module and r.levelno == levelno
218 ]
231 ]
219 assert message in messages
232 assert message in messages
220
233
221
234
222 def _load_svn_dump_into_repo(dump_name, repo_path):
235 def _load_svn_dump_into_repo(dump_name, repo_path):
223 """
236 """
224 Utility to populate a svn repository with a named dump
237 Utility to populate a svn repository with a named dump
225
238
226 Currently the dumps are in rc_testdata. They might later on be
239 Currently the dumps are in rc_testdata. They might later on be
227 integrated with the main repository once they stabilize more.
240 integrated with the main repository once they stabilize more.
228 """
241 """
229 dump = rc_testdata.load_svn_dump(dump_name)
242 dump = rc_testdata.load_svn_dump(dump_name)
230 load_dump = subprocess.Popen(
243 load_dump = subprocess.Popen(
231 ['svnadmin', 'load', repo_path],
244 ['svnadmin', 'load', repo_path],
232 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
245 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
233 stderr=subprocess.PIPE)
246 stderr=subprocess.PIPE)
234 out, err = load_dump.communicate(dump)
247 out, err = load_dump.communicate(dump)
235 if load_dump.returncode != 0:
248 if load_dump.returncode != 0:
236 log.error("Output of load_dump command: %s", out)
249 log.error("Output of load_dump command: %s", out)
237 log.error("Error output of load_dump command: %s", err)
250 log.error("Error output of load_dump command: %s", err)
238 raise Exception(
251 raise Exception(
239 'Failed to load dump "%s" into repository at path "%s".'
252 'Failed to load dump "%s" into repository at path "%s".'
240 % (dump_name, repo_path))
253 % (dump_name, repo_path))
241
254
242
255
243 class AssertResponse(object):
256 class AssertResponse(object):
244 """
257 """
245 Utility that helps to assert things about a given HTML response.
258 Utility that helps to assert things about a given HTML response.
246 """
259 """
247
260
248 def __init__(self, response):
261 def __init__(self, response):
249 self.response = response
262 self.response = response
250
263
251 def get_imports(self):
264 def get_imports(self):
252 return fromstring, tostring, CSSSelector
265 return fromstring, tostring, CSSSelector
253
266
254 def one_element_exists(self, css_selector):
267 def one_element_exists(self, css_selector):
255 self.get_element(css_selector)
268 self.get_element(css_selector)
256
269
257 def no_element_exists(self, css_selector):
270 def no_element_exists(self, css_selector):
258 assert not self._get_elements(css_selector)
271 assert not self._get_elements(css_selector)
259
272
260 def element_equals_to(self, css_selector, expected_content):
273 def element_equals_to(self, css_selector, expected_content):
261 element = self.get_element(css_selector)
274 element = self.get_element(css_selector)
262 element_text = self._element_to_string(element)
275 element_text = self._element_to_string(element)
276
263 assert expected_content in element_text
277 assert expected_content in element_text
264
278
265 def element_contains(self, css_selector, expected_content):
279 def element_contains(self, css_selector, expected_content):
266 element = self.get_element(css_selector)
280 element = self.get_element(css_selector)
267 assert expected_content in element.text_content()
281 assert expected_content in element.text_content()
268
282
269 def element_value_contains(self, css_selector, expected_content):
283 def element_value_contains(self, css_selector, expected_content):
270 element = self.get_element(css_selector)
284 element = self.get_element(css_selector)
271 assert expected_content in element.value
285 assert expected_content in element.value
272
286
273 def contains_one_link(self, link_text, href):
287 def contains_one_link(self, link_text, href):
274 fromstring, tostring, CSSSelector = self.get_imports()
288 fromstring, tostring, CSSSelector = self.get_imports()
275 doc = fromstring(self.response.body)
289 doc = fromstring(self.response.body)
276 sel = CSSSelector('a[href]')
290 sel = CSSSelector('a[href]')
277 elements = [
291 elements = [
278 e for e in sel(doc) if e.text_content().strip() == link_text]
292 e for e in sel(doc) if e.text_content().strip() == link_text]
279 assert len(elements) == 1, "Did not find link or found multiple links"
293 assert len(elements) == 1, "Did not find link or found multiple links"
280 self._ensure_url_equal(elements[0].attrib.get('href'), href)
294 self._ensure_url_equal(elements[0].attrib.get('href'), href)
281
295
282 def contains_one_anchor(self, anchor_id):
296 def contains_one_anchor(self, anchor_id):
283 fromstring, tostring, CSSSelector = self.get_imports()
297 fromstring, tostring, CSSSelector = self.get_imports()
284 doc = fromstring(self.response.body)
298 doc = fromstring(self.response.body)
285 sel = CSSSelector('#' + anchor_id)
299 sel = CSSSelector('#' + anchor_id)
286 elements = sel(doc)
300 elements = sel(doc)
287 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
301 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
288
302
289 def _ensure_url_equal(self, found, expected):
303 def _ensure_url_equal(self, found, expected):
290 assert _Url(found) == _Url(expected)
304 assert _Url(found) == _Url(expected)
291
305
292 def get_element(self, css_selector):
306 def get_element(self, css_selector):
293 elements = self._get_elements(css_selector)
307 elements = self._get_elements(css_selector)
294 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
308 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
295 return elements[0]
309 return elements[0]
296
310
297 def get_elements(self, css_selector):
311 def get_elements(self, css_selector):
298 return self._get_elements(css_selector)
312 return self._get_elements(css_selector)
299
313
300 def _get_elements(self, css_selector):
314 def _get_elements(self, css_selector):
301 fromstring, tostring, CSSSelector = self.get_imports()
315 fromstring, tostring, CSSSelector = self.get_imports()
302 doc = fromstring(self.response.body)
316 doc = fromstring(self.response.body)
303 sel = CSSSelector(css_selector)
317 sel = CSSSelector(css_selector)
304 elements = sel(doc)
318 elements = sel(doc)
305 return elements
319 return elements
306
320
307 def _element_to_string(self, element):
321 def _element_to_string(self, element):
308 fromstring, tostring, CSSSelector = self.get_imports()
322 fromstring, tostring, CSSSelector = self.get_imports()
309 return tostring(element)
323 return tostring(element, encoding='unicode')
310
324
311
325
312 class _Url(object):
326 class _Url(object):
313 """
327 """
314 A url object that can be compared with other url objects
328 A url object that can be compared with other url objects
315 without regard to the vagaries of encoding, escaping, and ordering
329 without regard to the vagaries of encoding, escaping, and ordering
316 of parameters in query strings.
330 of parameters in query strings.
317
331
318 Inspired by
332 Inspired by
319 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
333 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
320 """
334 """
321
335
322 def __init__(self, url):
336 def __init__(self, url):
323 parts = urllib.parse.urlparse(url)
337 parts = urllib.parse.urlparse(url)
324 _query = frozenset(urllib.parse.parse_qsl(parts.query))
338 _query = frozenset(urllib.parse.parse_qsl(parts.query))
325 _path = unquote_plus(parts.path)
339 _path = unquote_plus(parts.path)
326 parts = parts._replace(query=_query, path=_path)
340 parts = parts._replace(query=_query, path=_path)
327 self.parts = parts
341 self.parts = parts
328
342
329 def __eq__(self, other):
343 def __eq__(self, other):
330 return self.parts == other.parts
344 return self.parts == other.parts
331
345
332 def __hash__(self):
346 def __hash__(self):
333 return hash(self.parts)
347 return hash(self.parts)
334
348
335
349
336 def run_test_concurrently(times, raise_catched_exc=True):
350 def run_test_concurrently(times, raise_catched_exc=True):
337 """
351 """
338 Add this decorator to small pieces of code that you want to test
352 Add this decorator to small pieces of code that you want to test
339 concurrently
353 concurrently
340
354
341 ex:
355 ex:
342
356
343 @test_concurrently(25)
357 @test_concurrently(25)
344 def my_test_function():
358 def my_test_function():
345 ...
359 ...
346 """
360 """
347 def test_concurrently_decorator(test_func):
361 def test_concurrently_decorator(test_func):
348 def wrapper(*args, **kwargs):
362 def wrapper(*args, **kwargs):
349 exceptions = []
363 exceptions = []
350
364
351 def call_test_func():
365 def call_test_func():
352 try:
366 try:
353 test_func(*args, **kwargs)
367 test_func(*args, **kwargs)
354 except Exception as e:
368 except Exception as e:
355 exceptions.append(e)
369 exceptions.append(e)
356 if raise_catched_exc:
370 if raise_catched_exc:
357 raise
371 raise
358 threads = []
372 threads = []
359 for i in range(times):
373 for i in range(times):
360 threads.append(threading.Thread(target=call_test_func))
374 threads.append(threading.Thread(target=call_test_func))
361 for t in threads:
375 for t in threads:
362 t.start()
376 t.start()
363 for t in threads:
377 for t in threads:
364 t.join()
378 t.join()
365 if exceptions:
379 if exceptions:
366 raise Exception(
380 raise Exception(
367 'test_concurrently intercepted %s exceptions: %s' % (
381 'test_concurrently intercepted %s exceptions: %s' % (
368 len(exceptions), exceptions))
382 len(exceptions), exceptions))
369 return wrapper
383 return wrapper
370 return test_concurrently_decorator
384 return test_concurrently_decorator
371
385
372
386
373 def wait_for_url(url, timeout=10):
387 def wait_for_url(url, timeout=10):
374 """
388 """
375 Wait until URL becomes reachable.
389 Wait until URL becomes reachable.
376
390
377 It polls the URL until the timeout is reached or it became reachable.
391 It polls the URL until the timeout is reached or it became reachable.
378 It will call `py.test.fail` in case the URL is not reachable.
392 It will call `py.test.fail` in case the URL is not reachable.
379 """
393 """
380 timeout = time.time() + timeout
394 timeout = time.time() + timeout
381 last = 0
395 last = 0
382 wait = 0.1
396 wait = 0.1
383
397
384 while timeout > last:
398 while timeout > last:
385 last = time.time()
399 last = time.time()
386 if is_url_reachable(url):
400 if is_url_reachable(url):
387 break
401 break
388 elif (last + wait) > time.time():
402 elif (last + wait) > time.time():
389 # Go to sleep because not enough time has passed since last check.
403 # Go to sleep because not enough time has passed since last check.
390 time.sleep(wait)
404 time.sleep(wait)
391 else:
405 else:
392 pytest.fail("Timeout while waiting for URL {}".format(url))
406 pytest.fail("Timeout while waiting for URL {}".format(url))
393
407
394
408
395 def is_url_reachable(url):
409 def is_url_reachable(url):
396 try:
410 try:
397 urllib.request.urlopen(url)
411 urllib.request.urlopen(url)
398 except urllib.error.URLError:
412 except urllib.error.URLError:
399 log.exception('URL `{}` reach error'.format(url))
413 log.exception('URL `{}` reach error'.format(url))
400 return False
414 return False
401 return True
415 return True
402
416
403
417
404 def repo_on_filesystem(repo_name):
418 def repo_on_filesystem(repo_name):
405 from rhodecode.lib import vcs
419 from rhodecode.lib import vcs
406 from rhodecode.tests import TESTS_TMP_PATH
420 from rhodecode.tests import TESTS_TMP_PATH
407 repo = vcs.get_vcs_instance(
421 repo = vcs.get_vcs_instance(
408 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
422 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
409 return repo is not None
423 return repo is not None
410
424
411
425
412 def commit_change(
426 def commit_change(
413 repo, filename, content, message, vcs_type, parent=None, newfile=False):
427 repo, filename, content, message, vcs_type, parent=None, newfile=False):
414 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
428 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
415
429
416 repo = Repository.get_by_repo_name(repo)
430 repo = Repository.get_by_repo_name(repo)
417 _commit = parent
431 _commit = parent
418 if not parent:
432 if not parent:
419 _commit = EmptyCommit(alias=vcs_type)
433 _commit = EmptyCommit(alias=vcs_type)
420
434
421 if newfile:
435 if newfile:
422 nodes = {
436 nodes = {
423 filename: {
437 filename: {
424 'content': content
438 'content': content
425 }
439 }
426 }
440 }
427 commit = ScmModel().create_nodes(
441 commit = ScmModel().create_nodes(
428 user=TEST_USER_ADMIN_LOGIN, repo=repo,
442 user=TEST_USER_ADMIN_LOGIN, repo=repo,
429 message=message,
443 message=message,
430 nodes=nodes,
444 nodes=nodes,
431 parent_commit=_commit,
445 parent_commit=_commit,
432 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
446 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
433 )
447 )
434 else:
448 else:
435 commit = ScmModel().commit_change(
449 commit = ScmModel().commit_change(
436 repo=repo.scm_instance(), repo_name=repo.repo_name,
450 repo=repo.scm_instance(), repo_name=repo.repo_name,
437 commit=parent, user=TEST_USER_ADMIN_LOGIN,
451 commit=parent, user=TEST_USER_ADMIN_LOGIN,
438 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
452 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
439 message=message,
453 message=message,
440 content=content,
454 content=content,
441 f_path=filename
455 f_path=filename
442 )
456 )
443 return commit
457 return commit
444
458
445
459
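For illustration, a hedged sketch of calling commit_change; the repository name, file name and content are placeholders, and whether content must be str or bytes depends on the ScmModel API:

    # Hypothetical call; 'vcs_test_git' is assumed to be an existing test repository.
    commit = commit_change(
        repo='vcs_test_git',
        filename='example_setup.py',
        content='print("hello")\n',   # may need to be bytes, depending on ScmModel
        message='add example file',
        vcs_type='git',
        parent=None,                  # falls back to an EmptyCommit
        newfile=True,                 # create the node instead of changing an existing one
    )
    assert commit is not None
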
446 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
460 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
447 if not default:
461 if not default:
448 raise ValueError('Permission for default user must be given')
462 raise ValueError('Permission for default user must be given')
449 form_data = [(
463 form_data = [(
450 'csrf_token', csrf_token
464 'csrf_token', csrf_token
451 )]
465 )]
452 # add default
466 # add default
453 form_data.extend([
467 form_data.extend([
454 ('u_perm_1', default)
468 ('u_perm_1', default)
455 ])
469 ])
456
470
457 if grant:
471 if grant:
458 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
472 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
459 form_data.extend([
473 form_data.extend([
460 ('perm_new_member_perm_new{}'.format(cnt), perm),
474 ('perm_new_member_perm_new{}'.format(cnt), perm),
461 ('perm_new_member_id_new{}'.format(cnt), obj_id),
475 ('perm_new_member_id_new{}'.format(cnt), obj_id),
462 ('perm_new_member_name_new{}'.format(cnt), obj_name),
476 ('perm_new_member_name_new{}'.format(cnt), obj_name),
463 ('perm_new_member_type_new{}'.format(cnt), obj_type),
477 ('perm_new_member_type_new{}'.format(cnt), obj_type),
464
478
465 ])
479 ])
466 if revoke:
480 if revoke:
467 for obj_id, obj_type in revoke:
481 for obj_id, obj_type in revoke:
468 form_data.extend([
482 form_data.extend([
469 ('perm_del_member_id_{}'.format(obj_id), obj_id),
483 ('perm_del_member_id_{}'.format(obj_id), obj_id),
470 ('perm_del_member_type_{}'.format(obj_id), obj_type),
484 ('perm_del_member_type_{}'.format(obj_id), obj_type),
471 ])
485 ])
472 return form_data
486 return form_data
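A usage sketch for the form-data helper above; the object ids, names and the commented-out POST target are illustrative assumptions:

    # Hypothetical usage; the ids and names below do not refer to real fixtures.
    form_data = permission_update_data_generator(
        csrf_token,
        default='repository.read',                        # permission for the default user
        grant=[(3, 'repository.write', 'john', 'user')],  # (obj_id, perm, obj_name, obj_type)
        revoke=[(2, 'user')],                             # (obj_id, obj_type)
    )
    # The resulting list of (name, value) tuples is meant to be sent as POST params, e.g.:
    # response = app.post(route_path('edit_repo_perms', repo_name=repo_name), params=form_data)
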
@@ -1,193 +1,199 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for the test suite that exercises push/pull operations.
22 Base module for the test suite that exercises push/pull operations.
23
23
24 .. important::
24 .. important::
25
25
26 You must have git >= 1.8.5 for the tests to pass. Starting with commit 68b939b,
26 You must have git >= 1.8.5 for the tests to pass. Starting with commit 68b939b,
27 git redirects most of its output to stderr instead of stdout.
27 git redirects most of its output to stderr instead of stdout.
28 """
28 """
29
29
30 from os.path import join as jn
30 from os.path import join as jn
31 from subprocess import Popen, PIPE
31 from subprocess import Popen, PIPE
32 import logging
32 import logging
33 import os
33 import os
34 import tempfile
34 import tempfile
35
35
36 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.tests import GIT_REPO, HG_REPO
37 from rhodecode.tests import GIT_REPO, HG_REPO
37
38
38 DEBUG = True
39 DEBUG = True
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 REPO_GROUP = 'a_repo_group'
41 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO)
42 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43 GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO)
43
44
44 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
45
46
46
47
47 class Command(object):
48 class Command(object):
48
49
49 def __init__(self, cwd):
50 def __init__(self, cwd):
50 self.cwd = cwd
51 self.cwd = cwd
51 self.process = None
52 self.process = None
52
53
53 def execute(self, cmd, *args):
54 def execute(self, cmd, *args):
54 """
55 """
55 Runs the command on the system with the given ``args``.
56 Runs the command on the system with the given ``args``.
56 """
57 """
57
58
58 command = cmd + ' ' + ' '.join(args)
59 command = cmd + ' ' + ' '.join(args)
59 if DEBUG:
60 if DEBUG:
60 log.debug('*** CMD %s ***', command)
61 log.debug('*** CMD %s ***', command)
61
62
62 env = dict(os.environ)
63 env = dict(os.environ)
63 # Delete coverage variables, as they make the test fail for Mercurial
64 # Delete coverage variables, as they make the test fail for Mercurial
64 for key in list(env.keys()):
65 for key in list(env.keys()):
65 if key.startswith('COV_CORE_'):
66 if key.startswith('COV_CORE_'):
66 del env[key]
67 del env[key]
67
68
68 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 self.process = Popen(command, shell=True, stdout=PIPE, stderr=PIPE,
69 cwd=self.cwd, env=env)
70 cwd=self.cwd, env=env)
70 stdout, stderr = self.process.communicate()
71 stdout, stderr = self.process.communicate()
72
73 stdout = safe_str(stdout)
74 stderr = safe_str(stderr)
75
71 if DEBUG:
76 if DEBUG:
72 log.debug('STDOUT:%s', stdout)
77 log.debug('STDOUT:%s', stdout)
73 log.debug('STDERR:%s', stderr)
78 log.debug('STDERR:%s', stderr)
74 return stdout, stderr
79 return stdout, stderr
75
80
76 def assert_returncode_success(self):
81 def assert_returncode_success(self):
77 assert self.process.returncode == 0
82 assert self.process.returncode == 0
78
83
79
84
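A brief sketch of driving the Command wrapper above; the working directory is a placeholder:

    # Hypothetical usage of the Command helper.
    cmd = Command('/tmp')                          # cwd for the spawned shell
    stdout, stderr = cmd.execute('git', 'version')
    cmd.assert_returncode_success()
    # Both streams arrive as str thanks to the safe_str() conversion added above.
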
80 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
85 def _add_files(vcs, dest, clone_url=None, tags=None, target_branch=None, new_branch=False, **kwargs):
81 git_ident = "git config user.name {} && git config user.email {}".format(
86 full_name = 'Marcin Kuźminski'
82 'Marcin Kuźminski', 'me@email.com')
87 email = 'me@email.com'
88 git_ident = f"git config user.name {full_name} && git config user.email {email}"
83 cwd = path = jn(dest)
89 cwd = path = jn(dest)
84
90
85 tags = tags or []
91 tags = tags or []
86 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
92 added_file = jn(path, '{}_setup.py'.format(next(tempfile._RandomNameSequence())))
87 Command(cwd).execute('touch %s' % added_file)
93 Command(cwd).execute('touch %s' % added_file)
88 Command(cwd).execute('%s add %s' % (vcs, added_file))
94 Command(cwd).execute('%s add %s' % (vcs, added_file))
89 author_str = 'Marcin Kuźminski <me@email.com>'
95 author_str = 'Marcin Kuźminski <me@email.com>'
90
96
91 for i in range(kwargs.get('files_no', 3)):
97 for i in range(kwargs.get('files_no', 3)):
92 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
98 cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
93 Command(cwd).execute(cmd)
99 Command(cwd).execute(cmd)
94
100
95 if vcs == 'hg':
101 if vcs == 'hg':
96 cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
102 cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % (
97 i, author_str, added_file
103 i, author_str, added_file
98 )
104 )
99 elif vcs == 'git':
105 elif vcs == 'git':
100 cmd = """%s && git commit -m 'committed new %s' %s""" % (
106 cmd = """%s && git commit -m 'committed new %s' %s""" % (
101 git_ident, i, added_file)
107 git_ident, i, added_file)
102 Command(cwd).execute(cmd)
108 Command(cwd).execute(cmd)
103
109
104 for tag in tags:
110 for tag in tags:
105 if vcs == 'hg':
111 if vcs == 'hg':
106 Command(cwd).execute(
112 Command(cwd).execute(
107 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
113 'hg tag -m "{}" -u "{}" '.format(tag['commit'], author_str), tag['name'])
108 elif vcs == 'git':
114 elif vcs == 'git':
109 if tag['commit']:
115 if tag['commit']:
110 # annotated tag
116 # annotated tag
111 _stdout, _stderr = Command(cwd).execute(
117 _stdout, _stderr = Command(cwd).execute(
112 """%s && git tag -a %s -m "%s" """ % (
118 """%s && git tag -a %s -m "%s" """ % (
113 git_ident, tag['name'], tag['commit']))
119 git_ident, tag['name'], tag['commit']))
114 else:
120 else:
115 # lightweight tag
121 # lightweight tag
116 _stdout, _stderr = Command(cwd).execute(
122 _stdout, _stderr = Command(cwd).execute(
117 """%s && git tag %s""" % (
123 """%s && git tag %s""" % (
118 git_ident, tag['name']))
124 git_ident, tag['name']))
119
125
120
126
121 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
127 def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None,
122 new_branch=False, **kwargs):
128 new_branch=False, **kwargs):
123 """
129 """
124 Generate some files, add them to the DEST repo and push them back.
130 Generate some files, add them to the DEST repo and push them back.
125 ``vcs`` is either 'git' or 'hg' and defines which VCS the files are created for.
131 ``vcs`` is either 'git' or 'hg' and defines which VCS the files are created for.
126 """
132 """
127 git_ident = "git config user.name {} && git config user.email {}".format(
133 git_ident = "git config user.name {} && git config user.email {}".format(
128 'Marcin Kuźminski', 'me@email.com')
134 'Marcin Kuźminski', 'me@email.com')
129 cwd = path = jn(dest)
135 cwd = path = jn(dest)
130
136
131 # commit some stuff into this repo
137 # commit some stuff into this repo
132 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
138 _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs)
133
139
134 default_target_branch = {
140 default_target_branch = {
135 'git': 'master',
141 'git': 'master',
136 'hg': 'default'
142 'hg': 'default'
137 }.get(vcs)
143 }.get(vcs)
138
144
139 target_branch = target_branch or default_target_branch
145 target_branch = target_branch or default_target_branch
140
146
141 # PUSH it back
147 # PUSH it back
142 stdout = stderr = None
148 stdout = stderr = None
143 if vcs == 'hg':
149 if vcs == 'hg':
144 maybe_new_branch = ''
150 maybe_new_branch = ''
145 if new_branch:
151 if new_branch:
146 maybe_new_branch = '--new-branch'
152 maybe_new_branch = '--new-branch'
147 stdout, stderr = Command(cwd).execute(
153 stdout, stderr = Command(cwd).execute(
148 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
154 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url)
149 )
155 )
150 elif vcs == 'git':
156 elif vcs == 'git':
151 stdout, stderr = Command(cwd).execute(
157 stdout, stderr = Command(cwd).execute(
152 """{} &&
158 """{} &&
153 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
159 git push --verbose --tags {} {}""".format(git_ident, clone_url, target_branch)
154 )
160 )
155
161
156 return stdout, stderr
162 return stdout, stderr
157
163
158
164
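A sketch of how the push helpers above typically fit together in a test; the checkout path and authenticated clone URL are placeholders:

    # Hypothetical push flow; paths and credentials are placeholders.
    dest = '/tmp/vcs_test_git_clone'
    clone_url = 'http://user:secret@127.0.0.1:5000/vcs_test_git'
    stdout, stderr = _add_files_and_push('git', dest, clone_url=clone_url)
    _check_proper_git_push(stdout, stderr, branch='master')
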
159 def _check_proper_git_push(
165 def _check_proper_git_push(
160 stdout, stderr, branch='master', should_set_default_branch=False):
166 stdout, stderr, branch='master', should_set_default_branch=False):
161 # Note: git intentionally writes most of its output to stderr
167 # Note: git intentionally writes most of its output to stderr
162 assert 'fatal' not in stderr
168 assert 'fatal' not in stderr
163 assert 'rejected' not in stderr
169 assert 'rejected' not in stderr
164 assert 'Pushing to' in stderr
170 assert 'Pushing to' in stderr
165 assert '%s -> %s' % (branch, branch) in stderr
171 assert '%s -> %s' % (branch, branch) in stderr
166
172
167 if should_set_default_branch:
173 if should_set_default_branch:
168 assert "Setting default branch to %s" % branch in stderr
174 assert "Setting default branch to %s" % branch in stderr
169 else:
175 else:
170 assert "Setting default branch" not in stderr
176 assert "Setting default branch" not in stderr
171
177
172
178
173 def _check_proper_hg_push(stdout, stderr, branch='default'):
179 def _check_proper_hg_push(stdout, stderr, branch='default'):
174 assert 'pushing to' in stdout
180 assert 'pushing to' in stdout
175 assert 'searching for changes' in stdout
181 assert 'searching for changes' in stdout
176
182
177 assert 'abort:' not in stderr
183 assert 'abort:' not in stderr
178
184
179
185
180 def _check_proper_clone(stdout, stderr, vcs):
186 def _check_proper_clone(stdout, stderr, vcs):
181 if vcs == 'hg':
187 if vcs == 'hg':
182 assert 'requesting all changes' in stdout
188 assert 'requesting all changes' in stdout
183 assert 'adding changesets' in stdout
189 assert 'adding changesets' in stdout
184 assert 'adding manifests' in stdout
190 assert 'adding manifests' in stdout
185 assert 'adding file changes' in stdout
191 assert 'adding file changes' in stdout
186
192
187 assert stderr == ''
193 assert stderr == ''
188
194
189 if vcs == 'git':
195 if vcs == 'git':
190 assert '' == stdout
196 assert '' == stdout
191 assert 'Cloning into' in stderr
197 assert 'Cloning into' in stderr
192 assert 'abort:' not in stderr
198 assert 'abort:' not in stderr
193 assert 'fatal:' not in stderr
199 assert 'fatal:' not in stderr
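Finally, a sketch of validating a fresh clone with the helper above; the clone URL and destination directory are placeholders:

    # Hypothetical clone check; URL and destination are placeholders.
    stdout, stderr = Command('/tmp').execute(
        'git clone', 'http://user:secret@127.0.0.1:5000/vcs_test_git', 'cloned_repo')
    _check_proper_clone(stdout, stderr, vcs='git')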