@@ -1,338 +1,342 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.model.db import User |
|
24 | 24 | from rhodecode.model.pull_request import PullRequestModel |
|
25 | 25 | from rhodecode.model.repo import RepoModel |
|
26 | 26 | from rhodecode.model.user import UserModel |
|
27 | 27 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN |
|
28 | 28 | from rhodecode.api.tests.utils import build_data, api_call, assert_error |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | @pytest.mark.usefixtures("testuser_api", "app") |
|
32 | 32 | class TestCreatePullRequestApi(object): |
|
33 | 33 | finalizers = [] |
|
34 | 34 | |
|
35 | 35 | def teardown_method(self, method): |
|
36 | 36 | if self.finalizers: |
|
37 | 37 | for finalizer in self.finalizers: |
|
38 | 38 | finalizer() |
|
39 | 39 | self.finalizers = [] |
|
40 | 40 | |
|
41 | 41 | def test_create_with_wrong_data(self): |
|
42 | 42 | required_data = { |
|
43 | 43 | 'source_repo': 'tests/source_repo', |
|
44 | 44 | 'target_repo': 'tests/target_repo', |
|
45 | 45 | 'source_ref': 'branch:default:initial', |
|
46 | 46 | 'target_ref': 'branch:default:new-feature', |
|
47 | 47 | } |
|
48 | 48 | for key in required_data: |
|
49 | 49 | data = required_data.copy() |
|
50 | 50 | data.pop(key) |
|
51 | 51 | id_, params = build_data( |
|
52 | 52 | self.apikey, 'create_pull_request', **data) |
|
53 | 53 | response = api_call(self.app, params) |
|
54 | 54 | |
|
55 | 55 | expected = 'Missing non optional `{}` arg in JSON DATA'.format(key) |
|
56 | 56 | assert_error(id_, expected, given=response.body) |
|
57 | 57 | |
|
58 | 58 | @pytest.mark.backends("git", "hg") |
|
59 | 59 | def test_create_with_correct_data(self, backend): |
|
60 | 60 | data = self._prepare_data(backend) |
|
61 | 61 | RepoModel().revoke_user_permission( |
|
62 | 62 | self.source.repo_name, User.DEFAULT_USER) |
|
63 | 63 | id_, params = build_data( |
|
64 | 64 | self.apikey_regular, 'create_pull_request', **data) |
|
65 | 65 | response = api_call(self.app, params) |
|
66 | 66 | expected_message = "Created new pull request `{title}`".format( |
|
67 | 67 | title=data['title']) |
|
68 | 68 | result = response.json |
|
69 | assert result['error'] == None | |
|
69 | 70 | assert result['result']['msg'] == expected_message |
|
70 | 71 | pull_request_id = result['result']['pull_request_id'] |
|
71 | 72 | pull_request = PullRequestModel().get(pull_request_id) |
|
72 | 73 | assert pull_request.title == data['title'] |
|
73 | 74 | assert pull_request.description == data['description'] |
|
74 | 75 | assert pull_request.source_ref == data['source_ref'] |
|
75 | 76 | assert pull_request.target_ref == data['target_ref'] |
|
76 | 77 | assert pull_request.source_repo.repo_name == data['source_repo'] |
|
77 | 78 | assert pull_request.target_repo.repo_name == data['target_repo'] |
|
78 | 79 | assert pull_request.revisions == [self.commit_ids['change']] |
|
79 | 80 | assert len(pull_request.reviewers) == 1 |
|
80 | 81 | |
|
81 | 82 | @pytest.mark.backends("git", "hg") |
|
82 | 83 | def test_create_with_empty_description(self, backend): |
|
83 | 84 | data = self._prepare_data(backend) |
|
84 | 85 | data.pop('description') |
|
85 | 86 | id_, params = build_data( |
|
86 | 87 | self.apikey_regular, 'create_pull_request', **data) |
|
87 | 88 | response = api_call(self.app, params) |
|
88 | 89 | expected_message = "Created new pull request `{title}`".format( |
|
89 | 90 | title=data['title']) |
|
90 | 91 | result = response.json |
|
92 | assert result['error'] == None | |
|
91 | 93 | assert result['result']['msg'] == expected_message |
|
92 | 94 | pull_request_id = result['result']['pull_request_id'] |
|
93 | 95 | pull_request = PullRequestModel().get(pull_request_id) |
|
94 | 96 | assert pull_request.description == '' |
|
95 | 97 | |
|
96 | 98 | @pytest.mark.backends("git", "hg") |
|
97 | 99 | def test_create_with_empty_title(self, backend): |
|
98 | 100 | data = self._prepare_data(backend) |
|
99 | 101 | data.pop('title') |
|
100 | 102 | id_, params = build_data( |
|
101 | 103 | self.apikey_regular, 'create_pull_request', **data) |
|
102 | 104 | response = api_call(self.app, params) |
|
103 | 105 | result = response.json |
|
104 | 106 | pull_request_id = result['result']['pull_request_id'] |
|
105 | 107 | pull_request = PullRequestModel().get(pull_request_id) |
|
106 | 108 | data['ref'] = backend.default_branch_name |
|
107 | 109 | title = '{source_repo}#{ref} to {target_repo}'.format(**data) |
|
108 | 110 | assert pull_request.title == title |
|
109 | 111 | |
|
110 | 112 | @pytest.mark.backends("git", "hg") |
|
111 | 113 | def test_create_with_reviewers_specified_by_names( |
|
112 | 114 | self, backend, no_notifications): |
|
113 | 115 | data = self._prepare_data(backend) |
|
114 | 116 | reviewers = [ |
|
115 | 117 | {'username': TEST_USER_REGULAR_LOGIN, |
|
116 | 118 | 'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]}, |
|
117 | 119 | {'username': TEST_USER_ADMIN_LOGIN, |
|
118 | 120 | 'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)], |
|
119 | 121 | 'mandatory': True}, |
|
120 | 122 | ] |
|
121 | 123 | data['reviewers'] = reviewers |
|
122 | 124 | |
|
123 | 125 | id_, params = build_data( |
|
124 | 126 | self.apikey_regular, 'create_pull_request', **data) |
|
125 | 127 | response = api_call(self.app, params) |
|
126 | 128 | |
|
127 | 129 | expected_message = "Created new pull request `{title}`".format( |
|
128 | 130 | title=data['title']) |
|
129 | 131 | result = response.json |
|
132 | assert result['error'] == None | |
|
130 | 133 | assert result['result']['msg'] == expected_message |
|
131 | 134 | pull_request_id = result['result']['pull_request_id'] |
|
132 | 135 | pull_request = PullRequestModel().get(pull_request_id) |
|
133 | 136 | |
|
134 | 137 | actual_reviewers = [] |
|
135 | 138 | for rev in pull_request.reviewers: |
|
136 | 139 | entry = { |
|
137 | 140 | 'username': rev.user.username, |
|
138 | 141 | 'reasons': rev.reasons, |
|
139 | 142 | } |
|
140 | 143 | if rev.mandatory: |
|
141 | 144 | entry['mandatory'] = rev.mandatory |
|
142 | 145 | actual_reviewers.append(entry) |
|
143 | 146 | |
|
144 | 147 | # default reviewer will be added who is an owner of the repo |
|
145 | 148 | reviewers.append( |
|
146 | 149 | {'username': pull_request.author.username, |
|
147 | 150 | 'reasons': [u'Default reviewer', u'Repository owner']}, |
|
148 | 151 | ) |
|
149 | 152 | assert sorted(actual_reviewers, key=lambda e: e['username']) \ |
|
150 | 153 | == sorted(reviewers, key=lambda e: e['username']) |
|
151 | 154 | |
|
152 | 155 | @pytest.mark.backends("git", "hg") |
|
153 | 156 | def test_create_with_reviewers_specified_by_ids( |
|
154 | 157 | self, backend, no_notifications): |
|
155 | 158 | data = self._prepare_data(backend) |
|
156 | 159 | reviewers = [ |
|
157 | 160 | {'username': UserModel().get_by_username( |
|
158 | 161 | TEST_USER_REGULAR_LOGIN).user_id, |
|
159 | 162 | 'reasons': ['added manually']}, |
|
160 | 163 | {'username': UserModel().get_by_username( |
|
161 | 164 | TEST_USER_ADMIN_LOGIN).user_id, |
|
162 | 165 | 'reasons': ['added manually']}, |
|
163 | 166 | ] |
|
164 | 167 | |
|
165 | 168 | data['reviewers'] = reviewers |
|
166 | 169 | id_, params = build_data( |
|
167 | 170 | self.apikey_regular, 'create_pull_request', **data) |
|
168 | 171 | response = api_call(self.app, params) |
|
169 | 172 | |
|
170 | 173 | expected_message = "Created new pull request `{title}`".format( |
|
171 | 174 | title=data['title']) |
|
172 | 175 | result = response.json |
|
176 | assert result['error'] == None | |
|
173 | 177 | assert result['result']['msg'] == expected_message |
|
174 | 178 | pull_request_id = result['result']['pull_request_id'] |
|
175 | 179 | pull_request = PullRequestModel().get(pull_request_id) |
|
176 | 180 | |
|
177 | 181 | actual_reviewers = [] |
|
178 | 182 | for rev in pull_request.reviewers: |
|
179 | 183 | entry = { |
|
180 | 184 | 'username': rev.user.user_id, |
|
181 | 185 | 'reasons': rev.reasons, |
|
182 | 186 | } |
|
183 | 187 | if rev.mandatory: |
|
184 | 188 | entry['mandatory'] = rev.mandatory |
|
185 | 189 | actual_reviewers.append(entry) |
|
186 | 190 | # default reviewer will be added who is an owner of the repo |
|
187 | 191 | reviewers.append( |
|
188 | 192 | {'username': pull_request.author.user_id, |
|
189 | 193 | 'reasons': [u'Default reviewer', u'Repository owner']}, |
|
190 | 194 | ) |
|
191 | 195 | assert sorted(actual_reviewers, key=lambda e: e['username']) \ |
|
192 | 196 | == sorted(reviewers, key=lambda e: e['username']) |
|
193 | 197 | |
|
194 | 198 | @pytest.mark.backends("git", "hg") |
|
195 | 199 | def test_create_fails_when_the_reviewer_is_not_found(self, backend): |
|
196 | 200 | data = self._prepare_data(backend) |
|
197 | 201 | data['reviewers'] = [{'username': 'somebody'}] |
|
198 | 202 | id_, params = build_data( |
|
199 | 203 | self.apikey_regular, 'create_pull_request', **data) |
|
200 | 204 | response = api_call(self.app, params) |
|
201 | 205 | expected_message = 'user `somebody` does not exist' |
|
202 | 206 | assert_error(id_, expected_message, given=response.body) |
|
203 | 207 | |
|
204 | 208 | @pytest.mark.backends("git", "hg") |
|
205 | 209 | def test_cannot_create_with_reviewers_in_wrong_format(self, backend): |
|
206 | 210 | data = self._prepare_data(backend) |
|
207 | 211 | reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN]) |
|
208 | 212 | data['reviewers'] = reviewers |
|
209 | 213 | id_, params = build_data( |
|
210 | 214 | self.apikey_regular, 'create_pull_request', **data) |
|
211 | 215 | response = api_call(self.app, params) |
|
212 | 216 | expected_message = {u'': '"test_regular,test_admin" is not iterable'} |
|
213 | 217 | assert_error(id_, expected_message, given=response.body) |
|
214 | 218 | |
|
215 | 219 | @pytest.mark.backends("git", "hg") |
|
216 | 220 | def test_create_with_no_commit_hashes(self, backend): |
|
217 | 221 | data = self._prepare_data(backend) |
|
218 | 222 | expected_source_ref = data['source_ref'] |
|
219 | 223 | expected_target_ref = data['target_ref'] |
|
220 | 224 | data['source_ref'] = 'branch:{}'.format(backend.default_branch_name) |
|
221 | 225 | data['target_ref'] = 'branch:{}'.format(backend.default_branch_name) |
|
222 | 226 | id_, params = build_data( |
|
223 | 227 | self.apikey_regular, 'create_pull_request', **data) |
|
224 | 228 | response = api_call(self.app, params) |
|
225 | 229 | expected_message = "Created new pull request `{title}`".format( |
|
226 | 230 | title=data['title']) |
|
227 | 231 | result = response.json |
|
228 | 232 | assert result['result']['msg'] == expected_message |
|
229 | 233 | pull_request_id = result['result']['pull_request_id'] |
|
230 | 234 | pull_request = PullRequestModel().get(pull_request_id) |
|
231 | 235 | assert pull_request.source_ref == expected_source_ref |
|
232 | 236 | assert pull_request.target_ref == expected_target_ref |
|
233 | 237 | |
|
234 | 238 | @pytest.mark.backends("git", "hg") |
|
235 | 239 | @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"]) |
|
236 | 240 | def test_create_fails_with_wrong_repo(self, backend, data_key): |
|
237 | 241 | repo_name = 'fake-repo' |
|
238 | 242 | data = self._prepare_data(backend) |
|
239 | 243 | data[data_key] = repo_name |
|
240 | 244 | id_, params = build_data( |
|
241 | 245 | self.apikey_regular, 'create_pull_request', **data) |
|
242 | 246 | response = api_call(self.app, params) |
|
243 | 247 | expected_message = 'repository `{}` does not exist'.format(repo_name) |
|
244 | 248 | assert_error(id_, expected_message, given=response.body) |
|
245 | 249 | |
|
246 | 250 | @pytest.mark.backends("git", "hg") |
|
247 | 251 | @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"]) |
|
248 | 252 | def test_create_fails_with_non_existing_branch(self, backend, data_key): |
|
249 | 253 | branch_name = 'test-branch' |
|
250 | 254 | data = self._prepare_data(backend) |
|
251 | 255 | data[data_key] = "branch:{}".format(branch_name) |
|
252 | 256 | id_, params = build_data( |
|
253 | 257 | self.apikey_regular, 'create_pull_request', **data) |
|
254 | 258 | response = api_call(self.app, params) |
|
255 | 259 | expected_message = 'The specified value:{type}:`{name}` ' \ |
|
256 | 260 | 'does not exist, or is not allowed.'.format(type='branch', |
|
257 | 261 | name=branch_name) |
|
258 | 262 | assert_error(id_, expected_message, given=response.body) |
|
259 | 263 | |
|
260 | 264 | @pytest.mark.backends("git", "hg") |
|
261 | 265 | @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"]) |
|
262 | 266 | def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key): |
|
263 | 267 | data = self._prepare_data(backend) |
|
264 | 268 | ref = 'stange-ref' |
|
265 | 269 | data[data_key] = ref |
|
266 | 270 | id_, params = build_data( |
|
267 | 271 | self.apikey_regular, 'create_pull_request', **data) |
|
268 | 272 | response = api_call(self.app, params) |
|
269 | 273 | expected_message = ( |
|
270 | 274 | 'Ref `{ref}` given in a wrong format. Please check the API' |
|
271 | 275 | ' documentation for more details'.format(ref=ref)) |
|
272 | 276 | assert_error(id_, expected_message, given=response.body) |
|
273 | 277 | |
|
274 | 278 | @pytest.mark.backends("git", "hg") |
|
275 | 279 | @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"]) |
|
276 | 280 | def test_create_fails_with_non_existing_ref(self, backend, data_key): |
|
277 | 281 | commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10' |
|
278 | 282 | ref = self._get_full_ref(backend, commit_id) |
|
279 | 283 | data = self._prepare_data(backend) |
|
280 | 284 | data[data_key] = ref |
|
281 | 285 | id_, params = build_data( |
|
282 | 286 | self.apikey_regular, 'create_pull_request', **data) |
|
283 | 287 | response = api_call(self.app, params) |
|
284 | 288 | expected_message = 'Ref `{}` does not exist'.format(ref) |
|
285 | 289 | assert_error(id_, expected_message, given=response.body) |
|
286 | 290 | |
|
287 | 291 | @pytest.mark.backends("git", "hg") |
|
288 | 292 | def test_create_fails_when_no_revisions(self, backend): |
|
289 | 293 | data = self._prepare_data(backend, source_head='initial') |
|
290 | 294 | id_, params = build_data( |
|
291 | 295 | self.apikey_regular, 'create_pull_request', **data) |
|
292 | 296 | response = api_call(self.app, params) |
|
293 | 297 | expected_message = 'no commits found' |
|
294 | 298 | assert_error(id_, expected_message, given=response.body) |
|
295 | 299 | |
|
296 | 300 | @pytest.mark.backends("git", "hg") |
|
297 | 301 | def test_create_fails_when_no_permissions(self, backend): |
|
298 | 302 | data = self._prepare_data(backend) |
|
299 | 303 | RepoModel().revoke_user_permission( |
|
300 | 304 | self.source.repo_name, self.test_user) |
|
301 | 305 | RepoModel().revoke_user_permission( |
|
302 | 306 | self.source.repo_name, User.DEFAULT_USER) |
|
303 | 307 | |
|
304 | 308 | id_, params = build_data( |
|
305 | 309 | self.apikey_regular, 'create_pull_request', **data) |
|
306 | 310 | response = api_call(self.app, params) |
|
307 | 311 | expected_message = 'repository `{}` does not exist'.format( |
|
308 | 312 | self.source.repo_name) |
|
309 | 313 | assert_error(id_, expected_message, given=response.body) |
|
310 | 314 | |
|
311 | 315 | def _prepare_data( |
|
312 | 316 | self, backend, source_head='change', target_head='initial'): |
|
313 | 317 | commits = [ |
|
314 | 318 | {'message': 'initial'}, |
|
315 | 319 | {'message': 'change'}, |
|
316 | 320 | {'message': 'new-feature', 'parents': ['initial']}, |
|
317 | 321 | ] |
|
318 | 322 | self.commit_ids = backend.create_master_repo(commits) |
|
319 | 323 | self.source = backend.create_repo(heads=[source_head]) |
|
320 | 324 | self.target = backend.create_repo(heads=[target_head]) |
|
321 | 325 | |
|
322 | 326 | data = { |
|
323 | 327 | 'source_repo': self.source.repo_name, |
|
324 | 328 | 'target_repo': self.target.repo_name, |
|
325 | 329 | 'source_ref': self._get_full_ref( |
|
326 | 330 | backend, self.commit_ids[source_head]), |
|
327 | 331 | 'target_ref': self._get_full_ref( |
|
328 | 332 | backend, self.commit_ids[target_head]), |
|
329 | 333 | 'title': 'Test PR 1', |
|
330 | 334 | 'description': 'Test' |
|
331 | 335 | } |
|
332 | 336 | RepoModel().grant_user_permission( |
|
333 | 337 | self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read') |
|
334 | 338 | return data |
|
335 | 339 | |
|
336 | 340 | def _get_full_ref(self, backend, commit_id): |
|
337 | 341 | return 'branch:{branch}:{commit_id}'.format( |
|
338 | 342 | branch=backend.default_branch_name, commit_id=commit_id) |
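The new assertions in the test module above all guard the same JSON-RPC envelope: build_data assembles an id/auth_token/method/args payload, api_call posts it, and the response must carry error: null before result is inspected. A minimal standalone sketch of that envelope check, assuming the documented payload shape; build_payload, check_envelope, and the sample response below are illustrative, not part of the test suite:

import json

def build_payload(auth_token, method, **args):
    # Mirrors what build_data() assembles for a RhodeCode JSON-RPC call
    # (illustrative; field names follow the documented API envelope).
    return {'id': 1, 'auth_token': auth_token, 'method': method, 'args': args}

def check_envelope(response_body, expected_id=1):
    # The API answers with {id, result, error}; exactly one of result/error
    # is set. The tests assert error is None before using result.
    data = json.loads(response_body)
    assert data['id'] == expected_id
    assert data['error'] is None, 'API error: %s' % data['error']
    return data['result']

if __name__ == '__main__':
    payload = build_payload('secret', 'create_pull_request',
                            source_repo='tests/source_repo',
                            target_repo='tests/target_repo',
                            source_ref='branch:default:initial',
                            target_ref='branch:default:new-feature',
                            title='Test PR 1')
    # A successful response as the server might return it (sample data):
    body = json.dumps({'id': 1, 'error': None,
                       'result': {'msg': 'Created new pull request `Test PR 1`',
                                  'pull_request_id': 42}})
    result = check_envelope(body)
    print(result['pull_request_id'])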
@@ -1,308 +1,308 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode task modules, containing all task that suppose to be run |
|
23 | 23 | by celery daemon |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import time |
|
28 | 28 | |
|
29 | 29 | import rhodecode |
|
30 | 30 | from rhodecode.lib import audit_logger |
|
31 | 31 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask |
|
32 | 32 | from rhodecode.lib.hooks_base import log_create_repository |
|
33 | 33 | from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer |
|
34 | 34 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
35 | 35 | from rhodecode.model.db import Session, IntegrityError, Repository, User, true |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | @async_task(ignore_result=True, base=RequestContextTask) |
|
39 | 39 | def send_email(recipients, subject, body='', html_body='', email_config=None): |
|
40 | 40 | """ |
|
41 | 41 | Sends an email with defined parameters from the .ini files. |
|
42 | 42 | |
|
43 | 43 | :param recipients: list of recipients, it this is empty the defined email |
|
44 | 44 | address from field 'email_to' is used instead |
|
45 | 45 | :param subject: subject of the mail |
|
46 | 46 | :param body: body of the mail |
|
47 | 47 | :param html_body: html version of body |
|
48 | 48 | """ |
|
49 | 49 | log = get_logger(send_email) |
|
50 | 50 | |
|
51 | 51 | email_config = email_config or rhodecode.CONFIG |
|
52 | 52 | |
|
53 | 53 | mail_server = email_config.get('smtp_server') or None |
|
54 | 54 | if mail_server is None: |
|
55 | 55 | log.error("SMTP server information missing. Sending email failed. " |
|
56 | 56 | "Make sure that `smtp_server` variable is configured " |
|
57 | 57 | "inside the .ini file") |
|
58 | 58 | return False |
|
59 | 59 | |
|
60 | 60 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) |
|
61 | 61 | if not recipients: |
|
62 | 62 | # if recipients are not defined we send to email_config + all admins |
|
63 | 63 | admins = [] |
|
64 | 64 | for u in User.query().filter(User.admin == true()).all(): |
|
65 | 65 | if u.email: |
|
66 | 66 | admins.append(u.email) |
|
67 | 67 | recipients = [] |
|
68 | 68 | config_email = email_config.get('email_to') |
|
69 | 69 | if config_email: |
|
70 | 70 | recipients += [config_email] |
|
71 | 71 | recipients += admins |
|
72 | 72 | |
|
73 | 73 | mail_from = email_config.get('app_email_from', 'RhodeCode') |
|
74 | 74 | user = email_config.get('smtp_username') |
|
75 | 75 | passwd = email_config.get('smtp_password') |
|
76 | 76 | mail_port = email_config.get('smtp_port') |
|
77 | 77 | tls = str2bool(email_config.get('smtp_use_tls')) |
|
78 | 78 | ssl = str2bool(email_config.get('smtp_use_ssl')) |
|
79 | 79 | debug = str2bool(email_config.get('debug')) |
|
80 | 80 | smtp_auth = email_config.get('smtp_auth') |
|
81 | 81 | |
|
82 | 82 | try: |
|
83 | 83 | m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth, |
|
84 | 84 | mail_port, ssl, tls, debug=debug) |
|
85 | 85 | m.send(recipients, subject, body, html_body) |
|
86 | 86 | except Exception: |
|
87 | 87 | log.exception('Mail sending failed') |
|
88 | 88 | return False |
|
89 | 89 | return True |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | @async_task(ignore_result=True, base=RequestContextTask) |
|
93 | 93 | def create_repo(form_data, cur_user): |
|
94 | 94 | from rhodecode.model.repo import RepoModel |
|
95 | 95 | from rhodecode.model.user import UserModel |
|
96 | 96 | from rhodecode.model.settings import SettingsModel |
|
97 | 97 | |
|
98 | 98 | log = get_logger(create_repo) |
|
99 | 99 | |
|
100 | 100 | cur_user = UserModel()._get_user(cur_user) |
|
101 | 101 | owner = cur_user |
|
102 | 102 | |
|
103 | 103 | repo_name = form_data['repo_name'] |
|
104 | 104 | repo_name_full = form_data['repo_name_full'] |
|
105 | 105 | repo_type = form_data['repo_type'] |
|
106 | 106 | description = form_data['repo_description'] |
|
107 | 107 | private = form_data['repo_private'] |
|
108 | 108 | clone_uri = form_data.get('clone_uri') |
|
109 | 109 | repo_group = safe_int(form_data['repo_group']) |
|
110 | 110 | landing_rev = form_data['repo_landing_rev'] |
|
111 | 111 | copy_fork_permissions = form_data.get('copy_permissions') |
|
112 | 112 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
113 | 113 | fork_of = form_data.get('fork_parent_id') |
|
114 | 114 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
115 | 115 | |
|
116 | 116 | # repo creation defaults, private and repo_type are filled in form |
|
117 | 117 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
118 | 118 | enable_statistics = form_data.get( |
|
119 | 119 | 'enable_statistics', defs.get('repo_enable_statistics')) |
|
120 | 120 | enable_locking = form_data.get( |
|
121 | 121 | 'enable_locking', defs.get('repo_enable_locking')) |
|
122 | 122 | enable_downloads = form_data.get( |
|
123 | 123 | 'enable_downloads', defs.get('repo_enable_downloads')) |
|
124 | 124 | |
|
125 | 125 | try: |
|
126 | repo = RepoModel()._create_repo( | |
|
126 | RepoModel()._create_repo( | |
|
127 | 127 | repo_name=repo_name_full, |
|
128 | 128 | repo_type=repo_type, |
|
129 | 129 | description=description, |
|
130 | 130 | owner=owner, |
|
131 | 131 | private=private, |
|
132 | 132 | clone_uri=clone_uri, |
|
133 | 133 | repo_group=repo_group, |
|
134 | 134 | landing_rev=landing_rev, |
|
135 | 135 | fork_of=fork_of, |
|
136 | 136 | copy_fork_permissions=copy_fork_permissions, |
|
137 | 137 | copy_group_permissions=copy_group_permissions, |
|
138 | 138 | enable_statistics=enable_statistics, |
|
139 | 139 | enable_locking=enable_locking, |
|
140 | 140 | enable_downloads=enable_downloads, |
|
141 | 141 | state=state |
|
142 | 142 | ) |
|
143 | 143 | Session().commit() |
|
144 | 144 | |
|
145 | 145 | # now create this repo on Filesystem |
|
146 | 146 | RepoModel()._create_filesystem_repo( |
|
147 | 147 | repo_name=repo_name, |
|
148 | 148 | repo_type=repo_type, |
|
149 | 149 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
150 | 150 | clone_uri=clone_uri, |
|
151 | 151 | ) |
|
152 | 152 | repo = Repository.get_by_repo_name(repo_name_full) |
|
153 | 153 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
154 | 154 | |
|
155 | 155 | # update repo commit caches initially |
|
156 | 156 | repo.update_commit_cache() |
|
157 | 157 | |
|
158 | 158 | # set new created state |
|
159 | 159 | repo.set_state(Repository.STATE_CREATED) |
|
160 | 160 | repo_id = repo.repo_id |
|
161 | 161 | repo_data = repo.get_api_data() |
|
162 | 162 | |
|
163 | 163 | audit_logger.store( |
|
164 | 164 | 'repo.create', action_data={'data': repo_data}, |
|
165 | 165 | user=cur_user, |
|
166 | 166 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
167 | 167 | |
|
168 | 168 | Session().commit() |
|
169 | 169 | except Exception as e: |
|
170 | 170 | log.warning('Exception occurred when creating repository, ' |
|
171 | 171 | 'doing cleanup...', exc_info=True) |
|
172 | 172 | if isinstance(e, IntegrityError): |
|
173 | 173 | Session().rollback() |
|
174 | 174 | |
|
175 | 175 | # rollback things manually ! |
|
176 | 176 | repo = Repository.get_by_repo_name(repo_name_full) |
|
177 | 177 | if repo: |
|
178 | 178 | Repository.delete(repo.repo_id) |
|
179 | 179 | Session().commit() |
|
180 | 180 | RepoModel()._delete_filesystem_repo(repo) |
|
181 | 181 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
182 | 182 | raise |
|
183 | 183 | |
|
184 | 184 | return True |
|
185 | 185 | |
|
186 | 186 | |
|
187 | 187 | @async_task(ignore_result=True, base=RequestContextTask) |
|
188 | 188 | def create_repo_fork(form_data, cur_user): |
|
189 | 189 | """ |
|
190 | 190 | Creates a fork of repository using internal VCS methods |
|
191 | 191 | """ |
|
192 | 192 | from rhodecode.model.repo import RepoModel |
|
193 | 193 | from rhodecode.model.user import UserModel |
|
194 | 194 | |
|
195 | 195 | log = get_logger(create_repo_fork) |
|
196 | 196 | |
|
197 | 197 | cur_user = UserModel()._get_user(cur_user) |
|
198 | 198 | owner = cur_user |
|
199 | 199 | |
|
200 | 200 | repo_name = form_data['repo_name'] # fork in this case |
|
201 | 201 | repo_name_full = form_data['repo_name_full'] |
|
202 | 202 | repo_type = form_data['repo_type'] |
|
203 | 203 | description = form_data['description'] |
|
204 | 204 | private = form_data['private'] |
|
205 | 205 | clone_uri = form_data.get('clone_uri') |
|
206 | 206 | repo_group = safe_int(form_data['repo_group']) |
|
207 | 207 | landing_rev = form_data['landing_rev'] |
|
208 | 208 | copy_fork_permissions = form_data.get('copy_permissions') |
|
209 | 209 | fork_id = safe_int(form_data.get('fork_parent_id')) |
|
210 | 210 | |
|
211 | 211 | try: |
|
212 | 212 | fork_of = RepoModel()._get_repo(fork_id) |
|
213 | 213 | RepoModel()._create_repo( |
|
214 | 214 | repo_name=repo_name_full, |
|
215 | 215 | repo_type=repo_type, |
|
216 | 216 | description=description, |
|
217 | 217 | owner=owner, |
|
218 | 218 | private=private, |
|
219 | 219 | clone_uri=clone_uri, |
|
220 | 220 | repo_group=repo_group, |
|
221 | 221 | landing_rev=landing_rev, |
|
222 | 222 | fork_of=fork_of, |
|
223 | 223 | copy_fork_permissions=copy_fork_permissions |
|
224 | 224 | ) |
|
225 | 225 | |
|
226 | 226 | Session().commit() |
|
227 | 227 | |
|
228 | 228 | base_path = Repository.base_path() |
|
229 | 229 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
230 | 230 | |
|
231 | 231 | # now create this repo on Filesystem |
|
232 | 232 | RepoModel()._create_filesystem_repo( |
|
233 | 233 | repo_name=repo_name, |
|
234 | 234 | repo_type=repo_type, |
|
235 | 235 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
236 | 236 | clone_uri=source_repo_path, |
|
237 | 237 | ) |
|
238 | 238 | repo = Repository.get_by_repo_name(repo_name_full) |
|
239 | 239 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
240 | 240 | |
|
241 | 241 | # update repo commit caches initially |
|
242 | 242 | config = repo._config |
|
243 | 243 | config.set('extensions', 'largefiles', '') |
|
244 | 244 | repo.update_commit_cache(config=config) |
|
245 | 245 | |
|
246 | 246 | # set new created state |
|
247 | 247 | repo.set_state(Repository.STATE_CREATED) |
|
248 | 248 | |
|
249 | 249 | repo_id = repo.repo_id |
|
250 | 250 | repo_data = repo.get_api_data() |
|
251 | 251 | audit_logger.store( |
|
252 | 252 | 'repo.fork', action_data={'data': repo_data}, |
|
253 | 253 | user=cur_user, |
|
254 | 254 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
255 | 255 | |
|
256 | 256 | Session().commit() |
|
257 | 257 | except Exception as e: |
|
258 | 258 | log.warning('Exception occurred when forking repository, ' |
|
259 | 259 | 'doing cleanup...', exc_info=True) |
|
260 | 260 | if isinstance(e, IntegrityError): |
|
261 | 261 | Session().rollback() |
|
262 | 262 | |
|
263 | 263 | # rollback things manually ! |
|
264 | 264 | repo = Repository.get_by_repo_name(repo_name_full) |
|
265 | 265 | if repo: |
|
266 | 266 | Repository.delete(repo.repo_id) |
|
267 | 267 | Session().commit() |
|
268 | 268 | RepoModel()._delete_filesystem_repo(repo) |
|
269 | 269 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
270 | 270 | raise |
|
271 | 271 | |
|
272 | 272 | return True |
|
273 | 273 | |
|
274 | 274 | |
|
275 | 275 | @async_task(ignore_result=True) |
|
276 | 276 | def repo_maintenance(repoid): |
|
277 | 277 | from rhodecode.lib import repo_maintenance as repo_maintenance_lib |
|
278 | 278 | log = get_logger(repo_maintenance) |
|
279 | 279 | repo = Repository.get_by_id_or_repo_name(repoid) |
|
280 | 280 | if repo: |
|
281 | 281 | maintenance = repo_maintenance_lib.RepoMaintenance() |
|
282 | 282 | tasks = maintenance.get_tasks_for_repo(repo) |
|
283 | 283 | log.debug('Executing %s tasks on repo `%s`', tasks, repoid) |
|
284 | 284 | executed_types = maintenance.execute(repo) |
|
285 | 285 | log.debug('Got execution results %s', executed_types) |
|
286 | 286 | else: |
|
287 | 287 | log.debug('Repo `%s` not found or without a clone_url', repoid) |
|
288 | 288 | |
|
289 | 289 | |
|
290 | 290 | @async_task(ignore_result=True) |
|
291 | 291 | def check_for_update(): |
|
292 | 292 | from rhodecode.model.update import UpdateModel |
|
293 | 293 | update_url = UpdateModel().get_update_url() |
|
294 | 294 | cur_ver = rhodecode.__version__ |
|
295 | 295 | |
|
296 | 296 | try: |
|
297 | 297 | data = UpdateModel().get_update_data(update_url) |
|
298 | 298 | latest = data['versions'][0] |
|
299 | 299 | UpdateModel().store_version(latest['version']) |
|
300 | 300 | except Exception: |
|
301 | 301 | pass |
|
302 | 302 | |
|
303 | 303 | |
|
304 | 304 | @async_task(ignore_result=False) |
|
305 | 305 | def beat_check(*args, **kwargs): |
|
306 | 306 | log = get_logger(beat_check) |
|
307 | 307 | log.info('Got args: %r and kwargs %r', args, kwargs) |
|
308 | 308 | return time.time() |
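Both create_repo and create_repo_fork above share a two-phase shape: commit the database record first, then materialize the repository on the filesystem, and on any failure roll both halves back manually before re-raising. A minimal sketch of that pattern under stand-in callables; db_create, fs_create, and the delete hooks are hypothetical, not RhodeCode APIs:

import logging

log = logging.getLogger(__name__)

def two_phase_create(name, db_create, fs_create, db_delete, fs_delete):
    """Create a DB record, then a filesystem artifact; clean up on failure.

    Mirrors the try/except shape of the celery tasks above: the DB commit
    happens first so the record exists for hooks and audit logging, and the
    except block undoes both halves manually before re-raising.
    """
    db_create(name)                     # phase 1: metadata (committed)
    try:
        fs_create(name)                 # phase 2: on-disk repository
    except Exception:
        log.warning('creation of %s failed, doing cleanup...', name,
                    exc_info=True)
        # rollback things manually, newest artifact first
        fs_delete(name)
        db_delete(name)
        raise
    return True

if __name__ == '__main__':
    created = {'db': set(), 'fs': set()}
    two_phase_create(
        'demo',
        db_create=lambda n: created['db'].add(n),
        fs_create=lambda n: created['fs'].add(n),
        db_delete=lambda n: created['db'].discard(n),
        fs_delete=lambda n: created['fs'].discard(n))
    print(created)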
@@ -1,1009 +1,999 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | GIT repository module |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import ( |
|
33 | 33 | utcdate_fromtimestamp, makedate, date_astimestamp) |
|
34 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 35 | from rhodecode.lib.vcs import connection, path as vcspath |
|
36 | 36 | from rhodecode.lib.vcs.backends.base import ( |
|
37 | 37 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
38 | 38 | MergeFailureReason, Reference) |
|
39 | 39 | from rhodecode.lib.vcs.backends.git.commit import GitCommit |
|
40 | 40 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
41 | 41 | from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | CommitDoesNotExistError, EmptyRepositoryError, |
|
44 | 44 | RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$') |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | class GitRepository(BaseRepository): |
|
53 | 53 | """ |
|
54 | 54 | Git repository backend. |
|
55 | 55 | """ |
|
56 | 56 | DEFAULT_BRANCH_NAME = 'master' |
|
57 | 57 | |
|
58 | 58 | contact = BaseRepository.DEFAULT_CONTACT |
|
59 | 59 | |
|
60 | 60 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
61 | update_after_clone=False, with_wire=None, bare=False): | |
|
61 | do_workspace_checkout=False, with_wire=None, bare=False): | |
|
62 | 62 | |
|
63 | 63 | self.path = safe_str(os.path.abspath(repo_path)) |
|
64 | 64 | self.config = config if config else self.get_default_config() |
|
65 | 65 | self.with_wire = with_wire |
|
66 | 66 | |
|
67 | self._init_repo(create, src_url, update_after_clone, bare) | |
|
67 | self._init_repo(create, src_url, do_workspace_checkout, bare) | |
|
68 | 68 | |
|
69 | 69 | # caches |
|
70 | 70 | self._commit_ids = {} |
|
71 | 71 | |
|
72 | 72 | @LazyProperty |
|
73 | 73 | def _remote(self): |
|
74 | 74 | return connection.Git(self.path, self.config, with_wire=self.with_wire) |
|
75 | 75 | |
|
76 | 76 | @LazyProperty |
|
77 | 77 | def bare(self): |
|
78 | 78 | return self._remote.bare() |
|
79 | 79 | |
|
80 | 80 | @LazyProperty |
|
81 | 81 | def head(self): |
|
82 | 82 | return self._remote.head() |
|
83 | 83 | |
|
84 | 84 | @LazyProperty |
|
85 | 85 | def commit_ids(self): |
|
86 | 86 | """ |
|
87 | 87 | Returns list of commit ids, in ascending order. Being lazy |
|
88 | 88 | attribute allows external tools to inject commit ids from cache. |
|
89 | 89 | """ |
|
90 | 90 | commit_ids = self._get_all_commit_ids() |
|
91 | 91 | self._rebuild_cache(commit_ids) |
|
92 | 92 | return commit_ids |
|
93 | 93 | |
|
94 | 94 | def _rebuild_cache(self, commit_ids): |
|
95 | 95 | self._commit_ids = dict((commit_id, index) |
|
96 | 96 | for index, commit_id in enumerate(commit_ids)) |
|
97 | 97 | |
|
98 | 98 | def run_git_command(self, cmd, **opts): |
|
99 | 99 | """ |
|
100 | 100 | Runs given ``cmd`` as git command and returns tuple |
|
101 | 101 | (stdout, stderr). |
|
102 | 102 | |
|
103 | 103 | :param cmd: git command to be executed |
|
104 | 104 | :param opts: env options to pass into Subprocess command |
|
105 | 105 | """ |
|
106 | 106 | if not isinstance(cmd, list): |
|
107 | 107 | raise ValueError('cmd must be a list, got %s instead' % type(cmd)) |
|
108 | 108 | |
|
109 | 109 | skip_stderr_log = opts.pop('skip_stderr_log', False) |
|
110 | 110 | out, err = self._remote.run_git_command(cmd, **opts) |
|
111 | 111 | if err and not skip_stderr_log: |
|
112 | 112 | log.debug('Stderr output of git command "%s":\n%s', cmd, err) |
|
113 | 113 | return out, err |
|
114 | 114 | |
|
115 | 115 | @staticmethod |
|
116 | 116 | def check_url(url, config): |
|
117 | 117 | """ |
|
118 | 118 | Function will check given url and try to verify if it's a valid |
|
119 | 119 | link. Sometimes it may happened that git will issue basic |
|
120 | 120 | auth request that can cause whole API to hang when used from python |
|
121 | 121 | or other external calls. |
|
122 | 122 | |
|
123 | 123 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
124 | 124 | when the return code is non 200 |
|
125 | 125 | """ |
|
126 | 126 | # check first if it's not an url |
|
127 | 127 | if os.path.isdir(url) or url.startswith('file:'): |
|
128 | 128 | return True |
|
129 | 129 | |
|
130 | 130 | if '+' in url.split('://', 1)[0]: |
|
131 | 131 | url = url.split('+', 1)[1] |
|
132 | 132 | |
|
133 | 133 | # Request the _remote to verify the url |
|
134 | 134 | return connection.Git.check_url(url, config.serialize()) |
|
135 | 135 | |
|
136 | 136 | @staticmethod |
|
137 | 137 | def is_valid_repository(path): |
|
138 | 138 | if os.path.isdir(os.path.join(path, '.git')): |
|
139 | 139 | return True |
|
140 | 140 | # check case of bare repository |
|
141 | 141 | try: |
|
142 | 142 | GitRepository(path) |
|
143 | 143 | return True |
|
144 | 144 | except VCSError: |
|
145 | 145 | pass |
|
146 | 146 | return False |
|
147 | 147 | |
|
148 | def _init_repo(self, create, src_url=None, update_after_clone=False, | |
|
148 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False, | |
|
149 | 149 | bare=False): |
|
150 | 150 | if create and os.path.exists(self.path): |
|
151 | 151 | raise RepositoryError( |
|
152 | 152 | "Cannot create repository at %s, location already exist" |
|
153 | 153 | % self.path) |
|
154 | 154 | |
|
155 | if bare and do_workspace_checkout: | |
|
156 | raise RepositoryError("Cannot update a bare repository") | |
|
155 | 157 | try: |
|
156 | if create and src_url: | |
|
158 | ||
|
159 | if src_url: | |
|
160 | # check URL before any actions | |
|
157 | 161 | GitRepository.check_url(src_url, self.config) |
|
158 | self.clone(src_url, update_after_clone, bare) | |
|
159 | elif create: | |
|
162 | |
|
163 | if create: | |
|
160 | 164 | os.makedirs(self.path, mode=0755) |
|
161 | 165 | |
|
162 | 166 | if bare: |
|
163 | 167 | self._remote.init_bare() |
|
164 | 168 | else: |
|
165 | 169 | self._remote.init() |
|
170 | ||
|
171 | if src_url and bare: | |
|
172 | # bare repository only allows a fetch and checkout is not allowed | |
|
173 | self.fetch(src_url, commit_ids=None) | |
|
174 | elif src_url: | |
|
175 | self.pull(src_url, commit_ids=None, | |
|
176 | update_after=do_workspace_checkout) | |
|
177 | ||
|
166 | 178 | else: |
|
167 | 179 | if not self._remote.assert_correct_path(): |
|
168 | 180 | raise RepositoryError( |
|
169 | 181 | 'Path "%s" does not contain a Git repository' % |
|
170 | 182 | (self.path,)) |
|
171 | 183 | |
|
172 | 184 | # TODO: johbo: check if we have to translate the OSError here |
|
173 | 185 | except OSError as err: |
|
174 | 186 | raise RepositoryError(err) |
|
175 | 187 | |
|
176 | 188 | def _get_all_commit_ids(self, filters=None): |
|
177 | 189 | # we must check if this repo is not empty, since later command |
|
178 | 190 | # fails if it is. And it's cheaper to ask than throw the subprocess |
|
179 | 191 | # errors |
|
180 | 192 | |
|
181 | 193 | head = self._remote.head(show_exc=False) |
|
182 | 194 | if not head: |
|
183 | 195 | return [] |
|
184 | 196 | |
|
185 | 197 | rev_filter = ['--branches', '--tags'] |
|
186 | 198 | extra_filter = [] |
|
187 | 199 | |
|
188 | 200 | if filters: |
|
189 | 201 | if filters.get('since'): |
|
190 | 202 | extra_filter.append('--since=%s' % (filters['since'])) |
|
191 | 203 | if filters.get('until'): |
|
192 | 204 | extra_filter.append('--until=%s' % (filters['until'])) |
|
193 | 205 | if filters.get('branch_name'): |
|
194 | 206 | rev_filter = ['--tags'] |
|
195 | 207 | extra_filter.append(filters['branch_name']) |
|
196 | 208 | rev_filter.extend(extra_filter) |
|
197 | 209 | |
|
198 | 210 | # if filters.get('start') or filters.get('end'): |
|
199 | 211 | # # skip is offset, max-count is limit |
|
200 | 212 | # if filters.get('start'): |
|
201 | 213 | # extra_filter += ' --skip=%s' % filters['start'] |
|
202 | 214 | # if filters.get('end'): |
|
203 | 215 | # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0)) |
|
204 | 216 | |
|
205 | 217 | cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter |
|
206 | 218 | try: |
|
207 | 219 | output, __ = self.run_git_command(cmd) |
|
208 | 220 | except RepositoryError: |
|
209 | 221 | # Can be raised for empty repositories |
|
210 | 222 | return [] |
|
211 | 223 | return output.splitlines() |
|
212 | 224 | |
|
213 | 225 | def _get_commit_id(self, commit_id_or_idx): |
|
214 | 226 | def is_null(value): |
|
215 | 227 | return len(value) == commit_id_or_idx.count('0') |
|
216 | 228 | |
|
217 | 229 | if self.is_empty(): |
|
218 | 230 | raise EmptyRepositoryError("There are no commits yet") |
|
219 | 231 | |
|
220 | 232 | if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1): |
|
221 | 233 | return self.commit_ids[-1] |
|
222 | 234 | |
|
223 | 235 | is_bstr = isinstance(commit_id_or_idx, (str, unicode)) |
|
224 | 236 | if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) |
|
225 | 237 | or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)): |
|
226 | 238 | try: |
|
227 | 239 | commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)] |
|
228 | 240 | except Exception: |
|
229 | 241 | msg = "Commit %s does not exist for %s" % ( |
|
230 | 242 | commit_id_or_idx, self) |
|
231 | 243 | raise CommitDoesNotExistError(msg) |
|
232 | 244 | |
|
233 | 245 | elif is_bstr: |
|
234 | 246 | # check full path ref, eg. refs/heads/master |
|
235 | 247 | ref_id = self._refs.get(commit_id_or_idx) |
|
236 | 248 | if ref_id: |
|
237 | 249 | return ref_id |
|
238 | 250 | |
|
239 | 251 | # check branch name |
|
240 | 252 | branch_ids = self.branches.values() |
|
241 | 253 | ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx) |
|
242 | 254 | if ref_id: |
|
243 | 255 | return ref_id |
|
244 | 256 | |
|
245 | 257 | # check tag name |
|
246 | 258 | ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx) |
|
247 | 259 | if ref_id: |
|
248 | 260 | return ref_id |
|
249 | 261 | |
|
250 | 262 | if (not SHA_PATTERN.match(commit_id_or_idx) or |
|
251 | 263 | commit_id_or_idx not in self.commit_ids): |
|
252 | 264 | msg = "Commit %s does not exist for %s" % ( |
|
253 | 265 | commit_id_or_idx, self) |
|
254 | 266 | raise CommitDoesNotExistError(msg) |
|
255 | 267 | |
|
256 | 268 | # Ensure we return full id |
|
257 | 269 | if not SHA_PATTERN.match(str(commit_id_or_idx)): |
|
258 | 270 | raise CommitDoesNotExistError( |
|
259 | 271 | "Given commit id %s not recognized" % commit_id_or_idx) |
|
260 | 272 | return commit_id_or_idx |
|
261 | 273 | |
|
262 | 274 | def get_hook_location(self): |
|
263 | 275 | """ |
|
264 | 276 | returns absolute path to location where hooks are stored |
|
265 | 277 | """ |
|
266 | 278 | loc = os.path.join(self.path, 'hooks') |
|
267 | 279 | if not self.bare: |
|
268 | 280 | loc = os.path.join(self.path, '.git', 'hooks') |
|
269 | 281 | return loc |
|
270 | 282 | |
|
271 | 283 | @LazyProperty |
|
272 | 284 | def last_change(self): |
|
273 | 285 | """ |
|
274 | 286 | Returns last change made on this repository as |
|
275 | 287 | `datetime.datetime` object. |
|
276 | 288 | """ |
|
277 | 289 | try: |
|
278 | 290 | return self.get_commit().date |
|
279 | 291 | except RepositoryError: |
|
280 | 292 | tzoffset = makedate()[1] |
|
281 | 293 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
282 | 294 | |
|
283 | 295 | def _get_fs_mtime(self): |
|
284 | 296 | idx_loc = '' if self.bare else '.git' |
|
285 | 297 | # fallback to filesystem |
|
286 | 298 | in_path = os.path.join(self.path, idx_loc, "index") |
|
287 | 299 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
288 | 300 | if os.path.exists(in_path): |
|
289 | 301 | return os.stat(in_path).st_mtime |
|
290 | 302 | else: |
|
291 | 303 | return os.stat(he_path).st_mtime |
|
292 | 304 | |
|
293 | 305 | @LazyProperty |
|
294 | 306 | def description(self): |
|
295 | 307 | description = self._remote.get_description() |
|
296 | 308 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
297 | 309 | |
|
298 | 310 | def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True): |
|
299 | 311 | if self.is_empty(): |
|
300 | 312 | return OrderedDict() |
|
301 | 313 | |
|
302 | 314 | result = [] |
|
303 | 315 | for ref, sha in self._refs.iteritems(): |
|
304 | 316 | if ref.startswith(prefix): |
|
305 | 317 | ref_name = ref |
|
306 | 318 | if strip_prefix: |
|
307 | 319 | ref_name = ref[len(prefix):] |
|
308 | 320 | result.append((safe_unicode(ref_name), sha)) |
|
309 | 321 | |
|
310 | 322 | def get_name(entry): |
|
311 | 323 | return entry[0] |
|
312 | 324 | |
|
313 | 325 | return OrderedDict(sorted(result, key=get_name, reverse=reverse)) |
|
314 | 326 | |
|
315 | 327 | def _get_branches(self): |
|
316 | 328 | return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True) |
|
317 | 329 | |
|
318 | 330 | @LazyProperty |
|
319 | 331 | def branches(self): |
|
320 | 332 | return self._get_branches() |
|
321 | 333 | |
|
322 | 334 | @LazyProperty |
|
323 | 335 | def branches_closed(self): |
|
324 | 336 | return {} |
|
325 | 337 | |
|
326 | 338 | @LazyProperty |
|
327 | 339 | def bookmarks(self): |
|
328 | 340 | return {} |
|
329 | 341 | |
|
330 | 342 | @LazyProperty |
|
331 | 343 | def branches_all(self): |
|
332 | 344 | all_branches = {} |
|
333 | 345 | all_branches.update(self.branches) |
|
334 | 346 | all_branches.update(self.branches_closed) |
|
335 | 347 | return all_branches |
|
336 | 348 | |
|
337 | 349 | @LazyProperty |
|
338 | 350 | def tags(self): |
|
339 | 351 | return self._get_tags() |
|
340 | 352 | |
|
341 | 353 | def _get_tags(self): |
|
342 | 354 | return self._get_refs_entries( |
|
343 | 355 | prefix='refs/tags/', strip_prefix=True, reverse=True) |
|
344 | 356 | |
|
345 | 357 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
346 | 358 | **kwargs): |
|
347 | 359 | # TODO: fix this method to apply annotated tags correct with message |
|
348 | 360 | """ |
|
349 | 361 | Creates and returns a tag for the given ``commit_id``. |
|
350 | 362 | |
|
351 | 363 | :param name: name for new tag |
|
352 | 364 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
353 | 365 | :param commit_id: commit id for which new tag would be created |
|
354 | 366 | :param message: message of the tag's commit |
|
355 | 367 | :param date: date of tag's commit |
|
356 | 368 | |
|
357 | 369 | :raises TagAlreadyExistError: if tag with same name already exists |
|
358 | 370 | """ |
|
359 | 371 | if name in self.tags: |
|
360 | 372 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
361 | 373 | commit = self.get_commit(commit_id=commit_id) |
|
362 | 374 | message = message or "Added tag %s for commit %s" % ( |
|
363 | 375 | name, commit.raw_id) |
|
364 | 376 | self._remote.set_refs('refs/tags/%s' % name, commit._commit['id']) |
|
365 | 377 | |
|
366 | 378 | self._refs = self._get_refs() |
|
367 | 379 | self.tags = self._get_tags() |
|
368 | 380 | return commit |
|
369 | 381 | |
|
370 | 382 | def remove_tag(self, name, user, message=None, date=None): |
|
371 | 383 | """ |
|
372 | 384 | Removes tag with the given ``name``. |
|
373 | 385 | |
|
374 | 386 | :param name: name of the tag to be removed |
|
375 | 387 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
376 | 388 | :param message: message of the tag's removal commit |
|
377 | 389 | :param date: date of tag's removal commit |
|
378 | 390 | |
|
379 | 391 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
380 | 392 | """ |
|
381 | 393 | if name not in self.tags: |
|
382 | 394 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
383 | 395 | tagpath = vcspath.join( |
|
384 | 396 | self._remote.get_refs_path(), 'refs', 'tags', name) |
|
385 | 397 | try: |
|
386 | 398 | os.remove(tagpath) |
|
387 | 399 | self._refs = self._get_refs() |
|
388 | 400 | self.tags = self._get_tags() |
|
389 | 401 | except OSError as e: |
|
390 | 402 | raise RepositoryError(e.strerror) |
|
391 | 403 | |
|
392 | 404 | def _get_refs(self): |
|
393 | 405 | return self._remote.get_refs() |
|
394 | 406 | |
|
395 | 407 | @LazyProperty |
|
396 | 408 | def _refs(self): |
|
397 | 409 | return self._get_refs() |
|
398 | 410 | |
|
399 | 411 | @property |
|
400 | 412 | def _ref_tree(self): |
|
401 | 413 | node = tree = {} |
|
402 | 414 | for ref, sha in self._refs.iteritems(): |
|
403 | 415 | path = ref.split('/') |
|
404 | 416 | for bit in path[:-1]: |
|
405 | 417 | node = node.setdefault(bit, {}) |
|
406 | 418 | node[path[-1]] = sha |
|
407 | 419 | node = tree |
|
408 | 420 | return tree |
|
409 | 421 | |
|
410 | 422 | def get_remote_ref(self, ref_name): |
|
411 | 423 | ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name)) |
|
412 | 424 | try: |
|
413 | 425 | return self._refs[ref_key] |
|
414 | 426 | except Exception: |
|
415 | 427 | return |
|
416 | 428 | |
|
417 | 429 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
418 | 430 | """ |
|
419 | 431 | Returns `GitCommit` object representing commit from git repository |
|
420 | 432 | at the given `commit_id` or head (most recent commit) if None given. |
|
421 | 433 | """ |
|
422 | 434 | if commit_id is not None: |
|
423 | 435 | self._validate_commit_id(commit_id) |
|
424 | 436 | elif commit_idx is not None: |
|
425 | 437 | self._validate_commit_idx(commit_idx) |
|
426 | 438 | commit_id = commit_idx |
|
427 | 439 | commit_id = self._get_commit_id(commit_id) |
|
428 | 440 | try: |
|
429 | 441 | # Need to call remote to translate id for tagging scenario |
|
430 | 442 | commit_id = self._remote.get_object(commit_id)["commit_id"] |
|
431 | 443 | idx = self._commit_ids[commit_id] |
|
432 | 444 | except KeyError: |
|
433 | 445 | raise RepositoryError("Cannot get object with id %s" % commit_id) |
|
434 | 446 | |
|
435 | 447 | return GitCommit(self, commit_id, idx, pre_load=pre_load) |
|
436 | 448 | |
|
437 | 449 | def get_commits( |
|
438 | 450 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
439 | 451 | branch_name=None, show_hidden=False, pre_load=None): |
|
440 | 452 | """ |
|
441 | 453 | Returns generator of `GitCommit` objects from start to end (both |
|
442 | 454 | are inclusive), in ascending date order. |
|
443 | 455 | |
|
444 | 456 | :param start_id: None, str(commit_id) |
|
445 | 457 | :param end_id: None, str(commit_id) |
|
446 | 458 | :param start_date: if specified, commits with commit date less than |
|
447 | 459 | ``start_date`` would be filtered out from returned set |
|
448 | 460 | :param end_date: if specified, commits with commit date greater than |
|
449 | 461 | ``end_date`` would be filtered out from returned set |
|
450 | 462 | :param branch_name: if specified, commits not reachable from given |
|
451 | 463 | branch would be filtered out from returned set |
|
452 | 464 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
453 | 465 | Mercurial evolve |
|
454 | 466 | :raise BranchDoesNotExistError: If given `branch_name` does not |
|
455 | 467 | exist. |
|
456 | 468 | :raise CommitDoesNotExistError: If commits for given `start` or |
|
457 | 469 | `end` could not be found. |
|
458 | 470 | |
|
459 | 471 | """ |
|
460 | 472 | if self.is_empty(): |
|
461 | 473 | raise EmptyRepositoryError("There are no commits yet") |
|
462 | 474 | self._validate_branch_name(branch_name) |
|
463 | 475 | |
|
464 | 476 | if start_id is not None: |
|
465 | 477 | self._validate_commit_id(start_id) |
|
466 | 478 | if end_id is not None: |
|
467 | 479 | self._validate_commit_id(end_id) |
|
468 | 480 | |
|
469 | 481 | start_raw_id = self._get_commit_id(start_id) |
|
470 | 482 | start_pos = self._commit_ids[start_raw_id] if start_id else None |
|
471 | 483 | end_raw_id = self._get_commit_id(end_id) |
|
472 | 484 | end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None |
|
473 | 485 | |
|
474 | 486 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
475 | 487 | raise RepositoryError( |
|
476 | 488 | "Start commit '%s' cannot be after end commit '%s'" % |
|
477 | 489 | (start_id, end_id)) |
|
478 | 490 | |
|
479 | 491 | if end_pos is not None: |
|
480 | 492 | end_pos += 1 |
|
481 | 493 | |
|
482 | 494 | filter_ = [] |
|
483 | 495 | if branch_name: |
|
484 | 496 | filter_.append({'branch_name': branch_name}) |
|
485 | 497 | if start_date and not end_date: |
|
486 | 498 | filter_.append({'since': start_date}) |
|
487 | 499 | if end_date and not start_date: |
|
488 | 500 | filter_.append({'until': end_date}) |
|
489 | 501 | if start_date and end_date: |
|
490 | 502 | filter_.append({'since': start_date}) |
|
491 | 503 | filter_.append({'until': end_date}) |
|
492 | 504 | |
|
493 | 505 | # if start_pos or end_pos: |
|
494 | 506 | # filter_.append({'start': start_pos}) |
|
495 | 507 | # filter_.append({'end': end_pos}) |
|
496 | 508 | |
|
497 | 509 | if filter_: |
|
498 | 510 | revfilters = { |
|
499 | 511 | 'branch_name': branch_name, |
|
500 | 512 | 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None, |
|
501 | 513 | 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None, |
|
502 | 514 | 'start': start_pos, |
|
503 | 515 | 'end': end_pos, |
|
504 | 516 | } |
|
505 | 517 | commit_ids = self._get_all_commit_ids(filters=revfilters) |
|
506 | 518 | |
|
507 | 519 | # pure python stuff, it's slow due to walker walking whole repo |
|
508 | 520 | # def get_revs(walker): |
|
509 | 521 | # for walker_entry in walker: |
|
510 | 522 | # yield walker_entry.commit.id |
|
511 | 523 | # revfilters = {} |
|
512 | 524 | # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters))))) |
|
513 | 525 | else: |
|
514 | 526 | commit_ids = self.commit_ids |
|
515 | 527 | |
|
516 | 528 | if start_pos or end_pos: |
|
517 | 529 | commit_ids = commit_ids[start_pos: end_pos] |
|
518 | 530 | |
|
519 | 531 | return CollectionGenerator(self, commit_ids, pre_load=pre_load) |
|
520 | 532 | |
|
521 | 533 | def get_diff( |
|
522 | 534 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
523 | 535 | context=3, path1=None): |
|
524 | 536 | """ |
|
525 | 537 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
526 | 538 | ``commit2`` since ``commit1``. |
|
527 | 539 | |
|
528 | 540 | :param commit1: Entry point from which diff is shown. Can be |
|
529 | 541 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
530 | 542 | the changes since empty state of the repository until ``commit2`` |
|
531 | 543 | :param commit2: Until which commits changes should be shown. |
|
532 | 544 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
533 | 545 | changes. Defaults to ``False``. |
|
534 | 546 | :param context: How many lines before/after changed lines should be |
|
535 | 547 | shown. Defaults to ``3``. |
|
536 | 548 | """ |
|
537 | 549 | self._validate_diff_commits(commit1, commit2) |
|
538 | 550 | if path1 is not None and path1 != path: |
|
539 | 551 | raise ValueError("Diff of two different paths not supported.") |
|
540 | 552 | |
|
541 | 553 | flags = [ |
|
542 | 554 | '-U%s' % context, '--full-index', '--binary', '-p', |
|
543 | 555 | '-M', '--abbrev=40'] |
|
544 | 556 | if ignore_whitespace: |
|
545 | 557 | flags.append('-w') |
|
546 | 558 | |
|
547 | 559 | if commit1 == self.EMPTY_COMMIT: |
|
548 | 560 | cmd = ['show'] + flags + [commit2.raw_id] |
|
549 | 561 | else: |
|
550 | 562 | cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id] |
|
551 | 563 | |
|
552 | 564 | if path: |
|
553 | 565 | cmd.extend(['--', path]) |
|
554 | 566 | |
|
555 | 567 | stdout, __ = self.run_git_command(cmd) |
|
556 | 568 | # If we used the 'show' command, strip the first few lines (until the

557 | 569 | # actual diff starts)
|
558 | 570 | if commit1 == self.EMPTY_COMMIT: |
|
559 | 571 | lines = stdout.splitlines() |
|
560 | 572 | x = 0 |
|
561 | 573 | for line in lines: |
|
562 | 574 | if line.startswith('diff'): |
|
563 | 575 | break |
|
564 | 576 | x += 1 |
|
565 | 577 | # Append a new line just like the 'diff' command does
|
566 | 578 | stdout = '\n'.join(lines[x:]) + '\n' |
|
567 | 579 | return GitDiff(stdout) |
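
The header-stripping step above, replayed on a canned `git show` output (the commit id and author are made up): everything before the first line starting with 'diff' is dropped and a trailing newline is re-added, so the result looks like plain `git diff` output.

    stdout = (
        "commit 1111111111111111111111111111111111111111\n"
        "Author: Jane Doe <jane@example.com>\n"
        "\n"
        "    initial commit\n"
        "\n"
        "diff --git a/README b/README\n"
        "new file mode 100644\n"
    )
    lines = stdout.splitlines()
    x = 0
    for line in lines:
        if line.startswith('diff'):
            break
        x += 1
    # keep only the diff body, newline-terminated like 'git diff' output
    stdout = '\n'.join(lines[x:]) + '\n'
    assert stdout.startswith('diff --git')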
|
568 | 580 | |
|
569 | 581 | def strip(self, commit_id, branch_name): |
|
570 | 582 | commit = self.get_commit(commit_id=commit_id) |
|
571 | 583 | if commit.merge: |
|
572 | 584 | raise Exception('Cannot reset to merge commit') |
|
573 | 585 | |
|
574 | 586 | # parent is going to be the new head now |
|
575 | 587 | commit = commit.parents[0] |
|
576 | 588 | self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id) |
|
577 | 589 | |
|
578 | 590 | self.commit_ids = self._get_all_commit_ids() |
|
579 | 591 | self._rebuild_cache(self.commit_ids) |
|
580 | 592 | |
|
581 | 593 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
582 | 594 | if commit_id1 == commit_id2: |
|
583 | 595 | return commit_id1 |
|
584 | 596 | |
|
585 | 597 | if self != repo2: |
|
586 | 598 | commits = self._remote.get_missing_revs( |
|
587 | 599 | commit_id1, commit_id2, repo2.path) |
|
588 | 600 | if commits: |
|
589 | 601 | commit = repo2.get_commit(commits[-1]) |
|
590 | 602 | if commit.parents: |
|
591 | 603 | ancestor_id = commit.parents[0].raw_id |
|
592 | 604 | else: |
|
593 | 605 | ancestor_id = None |
|
594 | 606 | else: |
|
595 | 607 | # no commits from the other repo; ancestor_id is commit_id2
|
596 | 608 | ancestor_id = commit_id2 |
|
597 | 609 | else: |
|
598 | 610 | output, __ = self.run_git_command( |
|
599 | 611 | ['merge-base', commit_id1, commit_id2]) |
|
600 | 612 | ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0] |
|
601 | 613 | |
|
602 | 614 | return ancestor_id |
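
For the same-repository case above, the ancestor comes from `git merge-base`; this replay uses a canned output string and the same 40-hex-digit regex as the method.

    import re

    output = "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3\n"  # canned merge-base output
    ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
    assert len(ancestor_id) == 40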
|
603 | 615 | |
|
604 | 616 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
605 | 617 | repo1 = self |
|
606 | 618 | ancestor_id = None |
|
607 | 619 | |
|
608 | 620 | if commit_id1 == commit_id2: |
|
609 | 621 | commits = [] |
|
610 | 622 | elif repo1 != repo2: |
|
611 | 623 | missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2, |
|
612 | 624 | repo2.path) |
|
613 | 625 | commits = [ |
|
614 | 626 | repo2.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
615 | 627 | for commit_id in reversed(missing_ids)] |
|
616 | 628 | else: |
|
617 | 629 | output, __ = repo1.run_git_command( |
|
618 | 630 | ['log', '--reverse', '--pretty=format: %H', '-s', |
|
619 | 631 | '%s..%s' % (commit_id1, commit_id2)]) |
|
620 | 632 | commits = [ |
|
621 | 633 | repo1.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
622 | 634 | for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)] |
|
623 | 635 | |
|
624 | 636 | return commits |
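
The same-repo branch of `compare` lists the commits between the two ids with `git log --reverse --pretty=format: %H` and extracts the SHAs with the regex above; a canned two-commit output makes the parsing step concrete.

    import re

    output = (  # canned 'git log --pretty=format: %H' output, oldest first
        " a94a8fe5ccb19ba61c4c0873d391e987982fbbd3\n"
        " de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3\n"
    )
    commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
    assert commit_ids[0].startswith('a94a8fe5')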
|
625 | 637 | |
|
626 | 638 | @LazyProperty |
|
627 | 639 | def in_memory_commit(self): |
|
628 | 640 | """ |
|
629 | 641 | Returns ``GitInMemoryCommit`` object for this repository. |
|
630 | 642 | """ |
|
631 | 643 | return GitInMemoryCommit(self) |
|
632 | 644 | |
|
633 | def clone(self, url, update_after_clone=False, bare=False): | |

645 | def pull(self, url, commit_ids=None, update_after=False): | |

634 | 646 | """

635 | Tries to clone changes from external location. | |

636 | | |
|
637 | :param update_after_clone: If set to ``False``, git won't checkout | |
|
638 | working directory | |
|
639 | :param bare: If set to ``True``, repository would be cloned into | |
|
640 | *bare* git repository (no working directory at all). | |
|
641 | """ | |
|
642 | # init_bare and init expect empty dir created to proceed | |
|
643 | if not os.path.exists(self.path): | |
|
644 | os.mkdir(self.path) | |
|
647 | Pull changes from external location. Pull is different in GIT | |
|
648 | than fetch, since it does a checkout | |
|
645 | 649 |
|
|
646 | if bare: | |
|
647 | self._remote.init_bare() | |
|
648 | else: | |
|
649 | self._remote.init() | |
|
650 | ||
|
651 | deferred = '^{}' | |
|
652 | valid_refs = ('refs/heads', 'refs/tags', 'HEAD') | |
|
653 | ||
|
654 | return self._remote.clone( | |
|
655 | url, deferred, valid_refs, update_after_clone) | |
|
656 | ||
|
657 | def pull(self, url, commit_ids=None): | |
|
650 | :param commit_ids: Optional. Can be set to a list of commit ids | |
|
651 | which shall be pulled from the other repository. | |
|
658 | 652 |
|
|
659 | Tries to pull changes from external location. We use fetch here since | |
|
660 | pull in git does merges and we want to be compatible with the hg backend, so | |
|
661 | pull == fetch in this case | |
|
662 | """ | |
|
663 | self.fetch(url, commit_ids=commit_ids) | |
|
653 | refs = None | |
|
654 | if commit_ids is not None: | |
|
655 | remote_refs = self._remote.get_remote_refs(url) | |
|
656 | refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids] | |
|
657 | self._remote.pull(url, refs=refs, update_after=update_after) | |
|
658 | self._remote.invalidate_vcs_cache() | |
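
Ref selection as in the new `pull` above: only remote refs whose target commit is in the requested set get pulled. The refs dict here is a hypothetical stand-in for what `get_remote_refs(url)` would return.

    remote_refs = {  # hypothetical get_remote_refs(url) result
        'refs/heads/master': 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',
        'refs/heads/feature': 'de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3',
    }
    commit_ids = ['de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3']
    refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
    assert refs == ['refs/heads/feature']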
|
664 | 659 | |
|
665 | 660 | def fetch(self, url, commit_ids=None): |
|
666 | 661 | """ |
|
667 | Tries to fetch changes from external location. | |

662 | Fetch all git objects from external location. | |
|
668 | 663 | """ |
|
669 | refs = None | |
|
670 | ||
|
671 | if commit_ids is not None: | |
|
672 | remote_refs = self._remote.get_remote_refs(url) | |
|
673 | refs = [ | |
|
674 | ref for ref in remote_refs if remote_refs[ref] in commit_ids] | |
|
675 | self._remote.fetch(url, refs=refs) | |
|
664 | self._remote.sync_fetch(url, refs=commit_ids) | |
|
665 | self._remote.invalidate_vcs_cache() | |
|
676 | 666 | |
|
677 | 667 | def push(self, url): |
|
678 | 668 | refs = None |
|
679 | 669 | self._remote.sync_push(url, refs=refs) |
|
680 | 670 | |
|
681 | 671 | def set_refs(self, ref_name, commit_id): |
|
682 | 672 | self._remote.set_refs(ref_name, commit_id) |
|
683 | 673 | |
|
684 | 674 | def remove_ref(self, ref_name): |
|
685 | 675 | self._remote.remove_ref(ref_name) |
|
686 | 676 | |
|
687 | 677 | def _update_server_info(self): |
|
688 | 678 | """ |
|
689 | 679 | runs git's update-server-info command in this repo instance
|
690 | 680 | """ |
|
691 | 681 | self._remote.update_server_info() |
|
692 | 682 | |
|
693 | 683 | def _current_branch(self): |
|
694 | 684 | """ |
|
695 | 685 | Return the name of the current branch. |
|
696 | 686 | |
|
697 | 687 | It only works for non bare repositories (i.e. repositories with a |
|
698 | 688 | working copy) |
|
699 | 689 | """ |
|
700 | 690 | if self.bare: |
|
701 | 691 | raise RepositoryError('Bare git repos do not have active branches') |
|
702 | 692 | |
|
703 | 693 | if self.is_empty(): |
|
704 | 694 | return None |
|
705 | 695 | |
|
706 | 696 | stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD']) |
|
707 | 697 | return stdout.strip() |
|
708 | 698 | |
|
709 | 699 | def _checkout(self, branch_name, create=False, force=False): |
|
710 | 700 | """ |
|
711 | 701 | Checkout a branch in the working directory. |
|
712 | 702 | |
|
713 | 703 | It tries to create the branch if create is True, failing if the branch |
|
714 | 704 | already exists. |
|
715 | 705 | |
|
716 | 706 | It only works for non bare repositories (i.e. repositories with a |
|
717 | 707 | working copy) |
|
718 | 708 | """ |
|
719 | 709 | if self.bare: |
|
720 | 710 | raise RepositoryError('Cannot checkout branches in a bare git repo') |
|
721 | 711 | |
|
722 | 712 | cmd = ['checkout'] |
|
723 | 713 | if force: |
|
724 | 714 | cmd.append('-f') |
|
725 | 715 | if create: |
|
726 | 716 | cmd.append('-b') |
|
727 | 717 | cmd.append(branch_name) |
|
728 | 718 | self.run_git_command(cmd, fail_on_stderr=False) |
|
729 | 719 | |
|
730 | 720 | def _identify(self): |
|
731 | 721 | """ |
|
732 | 722 | Return the current state of the working directory. |
|
733 | 723 | """ |
|
734 | 724 | if self.bare: |
|
735 | 725 | raise RepositoryError('Bare git repos do not have active branches') |
|
736 | 726 | |
|
737 | 727 | if self.is_empty(): |
|
738 | 728 | return None |
|
739 | 729 | |
|
740 | 730 | stdout, _ = self.run_git_command(['rev-parse', 'HEAD']) |
|
741 | 731 | return stdout.strip() |
|
742 | 732 | |
|
743 | 733 | def _local_clone(self, clone_path, branch_name, source_branch=None): |
|
744 | 734 | """ |
|
745 | 735 | Create a local clone of the current repo. |
|
746 | 736 | """ |
|
747 | 737 | # N.B.(skreft): the --branch option is required as otherwise the shallow |
|
748 | 738 | # clone will only fetch the active branch. |
|
749 | 739 | cmd = ['clone', '--branch', branch_name, |
|
750 | 740 | self.path, os.path.abspath(clone_path)] |
|
751 | 741 | |
|
752 | 742 | self.run_git_command(cmd, fail_on_stderr=False) |
|
753 | 743 | |
|
754 | 744 | # if we got a different source branch, make sure we also fetch it for
|
755 | 745 | # merge conditions |
|
756 | 746 | if source_branch and source_branch != branch_name: |
|
757 | 747 | # check if the ref exists. |
|
758 | 748 | shadow_repo = GitRepository(os.path.abspath(clone_path)) |
|
759 | 749 | if shadow_repo.get_remote_ref(source_branch): |
|
760 | 750 | cmd = ['fetch', self.path, source_branch] |
|
761 | 751 | self.run_git_command(cmd, fail_on_stderr=False) |
|
762 | 752 | |
|
763 | 753 | def _local_fetch(self, repository_path, branch_name, use_origin=False): |
|
764 | 754 | """ |
|
765 | 755 | Fetch a branch from a local repository. |
|
766 | 756 | """ |
|
767 | 757 | repository_path = os.path.abspath(repository_path) |
|
768 | 758 | if repository_path == self.path: |
|
769 | 759 | raise ValueError('Cannot fetch from the same repository') |
|
770 | 760 | |
|
771 | 761 | if use_origin: |
|
772 | 762 | branch_name = '+{branch}:refs/heads/{branch}'.format( |
|
773 | 763 | branch=branch_name) |
|
774 | 764 | |
|
775 | 765 | cmd = ['fetch', '--no-tags', '--update-head-ok', |
|
776 | 766 | repository_path, branch_name] |
|
777 | 767 | self.run_git_command(cmd, fail_on_stderr=False) |
|
778 | 768 | |
|
779 | 769 | def _local_reset(self, branch_name): |
|
780 | 770 | branch_name = '{}'.format(branch_name) |
|
781 | 771 | cmd = ['reset', '--hard', branch_name] |
|
782 | 772 | self.run_git_command(cmd, fail_on_stderr=False) |
|
783 | 773 | |
|
784 | 774 | def _last_fetch_heads(self): |
|
785 | 775 | """ |
|
786 | 776 | Return the last fetched heads that need merging. |
|
787 | 777 | |
|
788 | 778 | The algorithm is defined at |
|
789 | 779 | https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283 |
|
790 | 780 | """ |
|
791 | 781 | if not self.bare: |
|
792 | 782 | fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD') |
|
793 | 783 | else: |
|
794 | 784 | fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD') |
|
795 | 785 | |
|
796 | 786 | heads = [] |
|
797 | 787 | with open(fetch_heads_path) as f: |
|
798 | 788 | for line in f: |
|
799 | 789 | if ' not-for-merge ' in line: |
|
800 | 790 | continue |
|
801 | 791 | line = re.sub('\t.*', '', line, flags=re.DOTALL) |
|
802 | 792 | heads.append(line) |
|
803 | 793 | |
|
804 | 794 | return heads |
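
A replay of the FETCH_HEAD parsing above on canned file content. Note the method matches ' not-for-merge ' with surrounding spaces, so the skipped entry below is written space-delimited to exercise exactly that check; the regex drops everything from the first tab onwards.

    import re

    fetch_head = (  # canned stand-in for the .git/FETCH_HEAD content
        "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3\t\tbranch 'master' of /src/repo\n"
        "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3 not-for-merge branch 'dev' of /src/repo\n"
    )
    heads = []
    for line in fetch_head.splitlines(True):
        if ' not-for-merge ' in line:
            continue
        # drop everything from the first tab, including the newline (DOTALL)
        line = re.sub('\t.*', '', line, flags=re.DOTALL)
        heads.append(line)
    assert heads == ['de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3']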
|
805 | 795 | |
|
806 | 796 | def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): |
|
807 | 797 | return GitRepository(shadow_repository_path) |
|
808 | 798 | |
|
809 | 799 | def _local_pull(self, repository_path, branch_name, ff_only=True): |
|
810 | 800 | """ |
|
811 | 801 | Pull a branch from a local repository. |
|
812 | 802 | """ |
|
813 | 803 | if self.bare: |
|
814 | 804 | raise RepositoryError('Cannot pull into a bare git repository') |
|
815 | 805 | # N.B.(skreft): The --ff-only option is to make sure this is a |
|
816 | 806 | # fast-forward (i.e., we are only pulling new changes and there are no |
|
817 | 807 | # conflicts with our current branch) |
|
818 | 808 | # Additionally, that option needs to go before --no-tags, otherwise git |
|
819 | 809 | # pull complains about it being an unknown flag. |
|
820 | 810 | cmd = ['pull'] |
|
821 | 811 | if ff_only: |
|
822 | 812 | cmd.append('--ff-only') |
|
823 | 813 | cmd.extend(['--no-tags', repository_path, branch_name]) |
|
824 | 814 | self.run_git_command(cmd, fail_on_stderr=False) |
|
825 | 815 | |
|
826 | 816 | def _local_merge(self, merge_message, user_name, user_email, heads): |
|
827 | 817 | """ |
|
828 | 818 | Merge the given head into the checked out branch. |
|
829 | 819 | |
|
830 | 820 | It will force a merge commit. |
|
831 | 821 | |
|
832 | 822 | Currently it raises an error if the repo is empty, as it is not possible |
|
833 | 823 | to create a merge commit in an empty repo. |
|
834 | 824 | |
|
835 | 825 | :param merge_message: The message to use for the merge commit. |
|
836 | 826 | :param heads: the heads to merge. |
|
837 | 827 | """ |
|
838 | 828 | if self.bare: |
|
839 | 829 | raise RepositoryError('Cannot merge into a bare git repository') |
|
840 | 830 | |
|
841 | 831 | if not heads: |
|
842 | 832 | return |
|
843 | 833 | |
|
844 | 834 | if self.is_empty(): |
|
845 | 835 | # TODO(skreft): do something more robust in this case.
|
846 | 836 | raise RepositoryError( |
|
847 | 837 | 'Do not know how to merge into empty repositories yet') |
|
848 | 838 | |
|
849 | 839 | # N.B.(skreft): the --no-ff option is used to enforce the creation of a |
|
850 | 840 | # commit message. We also specify the user who is doing the merge. |
|
851 | 841 | cmd = ['-c', 'user.name="%s"' % safe_str(user_name), |
|
852 | 842 | '-c', 'user.email=%s' % safe_str(user_email), |
|
853 | 843 | 'merge', '--no-ff', '-m', safe_str(merge_message)] |
|
854 | 844 | cmd.extend(heads) |
|
855 | 845 | try: |
|
856 | 846 | output = self.run_git_command(cmd, fail_on_stderr=False) |
|
857 | 847 | except RepositoryError: |
|
858 | 848 | # Cleanup any merge leftovers |
|
859 | 849 | self.run_git_command(['merge', '--abort'], fail_on_stderr=False) |
|
860 | 850 | raise |
|
861 | 851 | |
|
862 | 852 | def _local_push( |
|
863 | 853 | self, source_branch, repository_path, target_branch, |
|
864 | 854 | enable_hooks=False, rc_scm_data=None): |
|
865 | 855 | """ |
|
866 | 856 | Push the source_branch to the given repository and target_branch. |
|
867 | 857 | |
|
868 | 858 | Currently, if the target_branch is not master and the target repo is

869 | 859 | empty, the push will work, but GitRepository won't be able to find

870 | 860 | the pushed branch or the commits, as HEAD will be corrupted (i.e.,

871 | 861 | pointing to master, which does not exist).
|
872 | 862 | |
|
873 | 863 | It does not run the hooks in the target repo. |
|
874 | 864 | """ |
|
875 | 865 | # TODO(skreft): deal with the case in which the target repo is empty, |
|
876 | 866 | # and the target_branch is not master. |
|
877 | 867 | target_repo = GitRepository(repository_path) |
|
878 | 868 | if (not target_repo.bare and |
|
879 | 869 | target_repo._current_branch() == target_branch): |
|
880 | 870 | # Git prevents pushing to the checked out branch, so simulate it by |
|
881 | 871 | # pulling into the target repository. |
|
882 | 872 | target_repo._local_pull(self.path, source_branch) |
|
883 | 873 | else: |
|
884 | 874 | cmd = ['push', os.path.abspath(repository_path), |
|
885 | 875 | '%s:%s' % (source_branch, target_branch)] |
|
886 | 876 | gitenv = {} |
|
887 | 877 | if rc_scm_data: |
|
888 | 878 | gitenv.update({'RC_SCM_DATA': rc_scm_data}) |
|
889 | 879 | |
|
890 | 880 | if not enable_hooks: |
|
891 | 881 | gitenv['RC_SKIP_HOOKS'] = '1' |
|
892 | 882 | self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv) |
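
The environment assembled for the push above, with hypothetical values: RC_SCM_DATA is forwarded when present, and the RC_SKIP_HOOKS marker is set unless hooks are explicitly enabled, mirroring the branch of `_local_push` that shells out to git.

    rc_scm_data = '{"username": "admin"}'  # hypothetical serialized scm data
    enable_hooks = False

    gitenv = {}
    if rc_scm_data:
        gitenv.update({'RC_SCM_DATA': rc_scm_data})
    if not enable_hooks:
        # same flag as set in _local_push above to skip target-repo hooks
        gitenv['RC_SKIP_HOOKS'] = '1'
    assert sorted(gitenv) == ['RC_SCM_DATA', 'RC_SKIP_HOOKS']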
|
893 | 883 | |
|
894 | 884 | def _get_new_pr_branch(self, source_branch, target_branch): |
|
895 | 885 | prefix = 'pr_%s-%s_' % (source_branch, target_branch) |
|
896 | 886 | pr_branches = [] |
|
897 | 887 | for branch in self.branches: |
|
898 | 888 | if branch.startswith(prefix): |
|
899 | 889 | pr_branches.append(int(branch[len(prefix):])) |
|
900 | 890 | |
|
901 | 891 | if not pr_branches: |
|
902 | 892 | branch_id = 0 |
|
903 | 893 | else: |
|
904 | 894 | branch_id = max(pr_branches) + 1 |
|
905 | 895 | |
|
906 | 896 | return '%s%d' % (prefix, branch_id) |
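
Name allocation as in `_get_new_pr_branch` above: the next free numeric suffix under the 'pr_<source>-<target>_' prefix, shown with a hypothetical branch list.

    source_branch, target_branch = 'feature', 'master'
    prefix = 'pr_%s-%s_' % (source_branch, target_branch)
    branches = ['master', 'pr_feature-master_0', 'pr_feature-master_1']  # hypothetical

    pr_branches = [int(b[len(prefix):]) for b in branches if b.startswith(prefix)]
    branch_id = max(pr_branches) + 1 if pr_branches else 0
    assert '%s%d' % (prefix, branch_id) == 'pr_feature-master_2'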
|
907 | 897 | |
|
908 | 898 | def _maybe_prepare_merge_workspace( |
|
909 | 899 | self, repo_id, workspace_id, target_ref, source_ref): |
|
910 | 900 | shadow_repository_path = self._get_shadow_repository_path( |
|
911 | 901 | repo_id, workspace_id) |
|
912 | 902 | if not os.path.exists(shadow_repository_path): |
|
913 | 903 | self._local_clone( |
|
914 | 904 | shadow_repository_path, target_ref.name, source_ref.name) |
|
915 | 905 | log.debug( |
|
916 | 906 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
917 | 907 | |
|
918 | 908 | return shadow_repository_path |
|
919 | 909 | |
|
920 | 910 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
921 | 911 | source_repo, source_ref, merge_message, |
|
922 | 912 | merger_name, merger_email, dry_run=False, |
|
923 | 913 | use_rebase=False, close_branch=False): |
|
924 | 914 | if target_ref.commit_id != self.branches[target_ref.name]: |
|
925 | 915 | log.warning('Target ref %s commit mismatch %s vs %s', target_ref, |
|
926 | 916 | target_ref.commit_id, self.branches[target_ref.name]) |
|
927 | 917 | return MergeResponse( |
|
928 | 918 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) |
|
929 | 919 | |
|
930 | 920 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
931 | 921 | repo_id, workspace_id, target_ref, source_ref) |
|
932 | 922 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
933 | 923 | |
|
934 | 924 | # checkout source, if it's different. Otherwise we could not |
|
935 | 925 | # fetch proper commits for merge testing |
|
936 | 926 | if source_ref.name != target_ref.name: |
|
937 | 927 | if shadow_repo.get_remote_ref(source_ref.name): |
|
938 | 928 | shadow_repo._checkout(source_ref.name, force=True) |
|
939 | 929 | |
|
940 | 930 | # checkout target, and fetch changes |
|
941 | 931 | shadow_repo._checkout(target_ref.name, force=True) |
|
942 | 932 | |
|
943 | 933 | # fetch/reset pull the target, in case it is changed |
|
944 | 934 | # this handles even force changes |
|
945 | 935 | shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True) |
|
946 | 936 | shadow_repo._local_reset(target_ref.name) |
|
947 | 937 | |
|
948 | 938 | # Need to reload repo to invalidate the cache, or otherwise we cannot |
|
949 | 939 | # retrieve the last target commit. |
|
950 | 940 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
951 | 941 | if target_ref.commit_id != shadow_repo.branches[target_ref.name]: |
|
952 | 942 | log.warning('Shadow Target ref %s commit mismatch %s vs %s', |
|
953 | 943 | target_ref, target_ref.commit_id, |
|
954 | 944 | shadow_repo.branches[target_ref.name]) |
|
955 | 945 | return MergeResponse( |
|
956 | 946 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) |
|
957 | 947 | |
|
958 | 948 | # calculate new branch |
|
959 | 949 | pr_branch = shadow_repo._get_new_pr_branch( |
|
960 | 950 | source_ref.name, target_ref.name) |
|
961 | 951 | log.debug('using pull-request merge branch: `%s`', pr_branch) |
|
962 | 952 | # checkout to temp branch, and fetch changes |
|
963 | 953 | shadow_repo._checkout(pr_branch, create=True) |
|
964 | 954 | try: |
|
965 | 955 | shadow_repo._local_fetch(source_repo.path, source_ref.name) |
|
966 | 956 | except RepositoryError: |
|
967 | 957 | log.exception('Failure when doing local fetch on git shadow repo') |
|
968 | 958 | return MergeResponse( |
|
969 | 959 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF) |
|
970 | 960 | |
|
971 | 961 | merge_ref = None |
|
972 | 962 | merge_failure_reason = MergeFailureReason.NONE |
|
973 | 963 | try: |
|
974 | 964 | shadow_repo._local_merge(merge_message, merger_name, merger_email, |
|
975 | 965 | [source_ref.commit_id]) |
|
976 | 966 | merge_possible = True |
|
977 | 967 | |
|
978 | 968 | # Need to reload repo to invalidate the cache, or otherwise we |
|
979 | 969 | # cannot retrieve the merge commit. |
|
980 | 970 | shadow_repo = GitRepository(shadow_repository_path) |
|
981 | 971 | merge_commit_id = shadow_repo.branches[pr_branch] |
|
982 | 972 | |
|
983 | 973 | # Set a reference pointing to the merge commit. This reference may |
|
984 | 974 | # be used to easily identify the last successful merge commit in |
|
985 | 975 | # the shadow repository. |
|
986 | 976 | shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id) |
|
987 | 977 | merge_ref = Reference('branch', 'pr-merge', merge_commit_id) |
|
988 | 978 | except RepositoryError: |
|
989 | 979 | log.exception('Failure when doing local merge on git shadow repo') |
|
990 | 980 | merge_possible = False |
|
991 | 981 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
992 | 982 | |
|
993 | 983 | if merge_possible and not dry_run: |
|
994 | 984 | try: |
|
995 | 985 | shadow_repo._local_push( |
|
996 | 986 | pr_branch, self.path, target_ref.name, enable_hooks=True, |
|
997 | 987 | rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA')) |
|
998 | 988 | merge_succeeded = True |
|
999 | 989 | except RepositoryError: |
|
1000 | 990 | log.exception( |
|
1001 | 991 | 'Failure when doing local push on git shadow repo') |
|
1002 | 992 | merge_succeeded = False |
|
1003 | 993 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
1004 | 994 | else: |
|
1005 | 995 | merge_succeeded = False |
|
1006 | 996 | |
|
1007 | 997 | return MergeResponse( |
|
1008 | 998 | merge_possible, merge_succeeded, merge_ref, |
|
1009 | 999 | merge_failure_reason) |
@@ -1,917 +1,924 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | HG repository module |
|
23 | 23 | """ |
|
24 | 24 | import os |
|
25 | 25 | import logging |
|
26 | 26 | import binascii |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import ( |
|
33 | 33 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) |
|
34 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 35 | from rhodecode.lib.vcs import connection, exceptions |
|
36 | 36 | from rhodecode.lib.vcs.backends.base import ( |
|
37 | 37 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
38 | 38 | MergeFailureReason, Reference, BasePathPermissionChecker) |
|
39 | 39 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit |
|
40 | 40 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
41 | 41 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
44 | 44 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError) |
|
45 | 45 | from rhodecode.lib.vcs.compat import configparser |
|
46 | 46 | |
|
47 | 47 | hexlify = binascii.hexlify |
|
48 | 48 | nullid = "\0" * 20 |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class MercurialRepository(BaseRepository): |
|
54 | 54 | """ |
|
55 | 55 | Mercurial repository backend |
|
56 | 56 | """ |
|
57 | 57 | DEFAULT_BRANCH_NAME = 'default' |
|
58 | 58 | |
|
59 | 59 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
60 | update_after_clone=False, with_wire=None): | |

60 | do_workspace_checkout=False, with_wire=None, bare=False): | |
|
61 | 61 | """ |
|
62 | 62 | Raises RepositoryError if repository could not be found at the given
|
63 | 63 | ``repo_path``. |
|
64 | 64 | |
|
65 | 65 | :param repo_path: local path of the repository |
|
66 | 66 | :param config: config object containing the repo configuration |
|
67 | 67 | :param create=False: if set to True, would try to create repository if |
|
68 | 68 | it does not exist rather than raising exception |
|
69 | 69 | :param src_url=None: would try to clone repository from given location |
|
70 | :param update_after_clone=False: sets update of working copy after | |

70 | :param do_workspace_checkout=False: sets update of working copy after | |
|
71 | 71 | making a clone |
|
72 | :param bare: not used; kept for compatibility with other VCS backends | |
|
72 | 73 | """ |
|
73 | 74 | |
|
74 | 75 | self.path = safe_str(os.path.abspath(repo_path)) |
|
75 | 76 | # mercurial since 4.4.X requires certain configuration to be present |
|
76 | 77 | # because sometimes we init the repos with config we need to meet |
|
77 | 78 | # special requirements |
|
78 | 79 | self.config = config if config else self.get_default_config( |
|
79 | 80 | default=[('extensions', 'largefiles', '1')]) |
|
80 | 81 | self.with_wire = with_wire |
|
81 | 82 | |
|
82 | self._init_repo(create, src_url, update_after_clone) | |

83 | self._init_repo(create, src_url, do_workspace_checkout) | |
|
83 | 84 | |
|
84 | 85 | # caches |
|
85 | 86 | self._commit_ids = {} |
|
86 | 87 | |
|
87 | 88 | @LazyProperty |
|
88 | 89 | def _remote(self): |
|
89 | 90 | return connection.Hg(self.path, self.config, with_wire=self.with_wire) |
|
90 | 91 | |
|
91 | 92 | @LazyProperty |
|
92 | 93 | def commit_ids(self): |
|
93 | 94 | """ |
|
94 | 95 | Returns list of commit ids, in ascending order. Being lazy |
|
95 | 96 | attribute allows external tools to inject shas from cache. |
|
96 | 97 | """ |
|
97 | 98 | commit_ids = self._get_all_commit_ids() |
|
98 | 99 | self._rebuild_cache(commit_ids) |
|
99 | 100 | return commit_ids |
|
100 | 101 | |
|
101 | 102 | def _rebuild_cache(self, commit_ids): |
|
102 | 103 | self._commit_ids = dict((commit_id, index) |
|
103 | 104 | for index, commit_id in enumerate(commit_ids)) |
|
104 | 105 | |
|
105 | 106 | @LazyProperty |
|
106 | 107 | def branches(self): |
|
107 | 108 | return self._get_branches() |
|
108 | 109 | |
|
109 | 110 | @LazyProperty |
|
110 | 111 | def branches_closed(self): |
|
111 | 112 | return self._get_branches(active=False, closed=True) |
|
112 | 113 | |
|
113 | 114 | @LazyProperty |
|
114 | 115 | def branches_all(self): |
|
115 | 116 | all_branches = {} |
|
116 | 117 | all_branches.update(self.branches) |
|
117 | 118 | all_branches.update(self.branches_closed) |
|
118 | 119 | return all_branches |
|
119 | 120 | |
|
120 | 121 | def _get_branches(self, active=True, closed=False): |
|
121 | 122 | """ |
|
122 | 123 | Gets branches for this repository |
|
123 | 124 | Returns only not closed active branches by default |
|
124 | 125 | |
|
125 | 126 | :param active: return also active branches |
|
126 | 127 | :param closed: return also closed branches |
|
127 | 128 | |
|
128 | 129 | """ |
|
129 | 130 | if self.is_empty(): |
|
130 | 131 | return {} |
|
131 | 132 | |
|
132 | 133 | def get_name(ctx): |
|
133 | 134 | return ctx[0] |
|
134 | 135 | |
|
135 | 136 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in |
|
136 | 137 | self._remote.branches(active, closed).items()] |
|
137 | 138 | |
|
138 | 139 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) |
|
139 | 140 | |
|
140 | 141 | @LazyProperty |
|
141 | 142 | def tags(self): |
|
142 | 143 | """ |
|
143 | 144 | Gets tags for this repository |
|
144 | 145 | """ |
|
145 | 146 | return self._get_tags() |
|
146 | 147 | |
|
147 | 148 | def _get_tags(self): |
|
148 | 149 | if self.is_empty(): |
|
149 | 150 | return {} |
|
150 | 151 | |
|
151 | 152 | def get_name(ctx): |
|
152 | 153 | return ctx[0] |
|
153 | 154 | |
|
154 | 155 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in |
|
155 | 156 | self._remote.tags().items()] |
|
156 | 157 | |
|
157 | 158 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) |
|
158 | 159 | |
|
159 | 160 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
160 | 161 | **kwargs): |
|
161 | 162 | """ |
|
162 | 163 | Creates and returns a tag for the given ``commit_id``. |
|
163 | 164 | |
|
164 | 165 | :param name: name for new tag |
|
165 | 166 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
166 | 167 | :param commit_id: commit id for which new tag would be created |
|
167 | 168 | :param message: message of the tag's commit |
|
168 | 169 | :param date: date of tag's commit |
|
169 | 170 | |
|
170 | 171 | :raises TagAlreadyExistError: if tag with same name already exists |
|
171 | 172 | """ |
|
172 | 173 | if name in self.tags: |
|
173 | 174 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
174 | 175 | commit = self.get_commit(commit_id=commit_id) |
|
175 | 176 | local = kwargs.setdefault('local', False) |
|
176 | 177 | |
|
177 | 178 | if message is None: |
|
178 | 179 | message = "Added tag %s for commit %s" % (name, commit.short_id) |
|
179 | 180 | |
|
180 | 181 | date, tz = date_to_timestamp_plus_offset(date) |
|
181 | 182 | |
|
182 | 183 | self._remote.tag( |
|
183 | 184 | name, commit.raw_id, message, local, user, date, tz) |
|
184 | 185 | self._remote.invalidate_vcs_cache() |
|
185 | 186 | |
|
186 | 187 | # Reinitialize tags |
|
187 | 188 | self.tags = self._get_tags() |
|
188 | 189 | tag_id = self.tags[name] |
|
189 | 190 | |
|
190 | 191 | return self.get_commit(commit_id=tag_id) |
|
191 | 192 | |
|
192 | 193 | def remove_tag(self, name, user, message=None, date=None): |
|
193 | 194 | """ |
|
194 | 195 | Removes tag with the given `name`. |
|
195 | 196 | |
|
196 | 197 | :param name: name of the tag to be removed |
|
197 | 198 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
198 | 199 | :param message: message of the tag's removal commit |
|
199 | 200 | :param date: date of tag's removal commit |
|
200 | 201 | |
|
201 | 202 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
202 | 203 | """ |
|
203 | 204 | if name not in self.tags: |
|
204 | 205 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
205 | 206 | if message is None: |
|
206 | 207 | message = "Removed tag %s" % name |
|
207 | 208 | local = False |
|
208 | 209 | |
|
209 | 210 | date, tz = date_to_timestamp_plus_offset(date) |
|
210 | 211 | |
|
211 | 212 | self._remote.tag(name, nullid, message, local, user, date, tz) |
|
212 | 213 | self._remote.invalidate_vcs_cache() |
|
213 | 214 | self.tags = self._get_tags() |
|
214 | 215 | |
|
215 | 216 | @LazyProperty |
|
216 | 217 | def bookmarks(self): |
|
217 | 218 | """ |
|
218 | 219 | Gets bookmarks for this repository |
|
219 | 220 | """ |
|
220 | 221 | return self._get_bookmarks() |
|
221 | 222 | |
|
222 | 223 | def _get_bookmarks(self): |
|
223 | 224 | if self.is_empty(): |
|
224 | 225 | return {} |
|
225 | 226 | |
|
226 | 227 | def get_name(ctx): |
|
227 | 228 | return ctx[0] |
|
228 | 229 | |
|
229 | 230 | _bookmarks = [ |
|
230 | 231 | (safe_unicode(n), hexlify(h)) for n, h in |
|
231 | 232 | self._remote.bookmarks().items()] |
|
232 | 233 | |
|
233 | 234 | return OrderedDict(sorted(_bookmarks, key=get_name)) |
|
234 | 235 | |
|
235 | 236 | def _get_all_commit_ids(self): |
|
236 | 237 | return self._remote.get_all_commit_ids('visible') |
|
237 | 238 | |
|
238 | 239 | def get_diff( |
|
239 | 240 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
240 | 241 | context=3, path1=None): |
|
241 | 242 | """ |
|
242 | 243 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
243 | 244 | `commit2` since `commit1`. |
|
244 | 245 | |
|
245 | 246 | :param commit1: Entry point from which diff is shown. Can be |
|
246 | 247 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
247 | 248 | the changes since empty state of the repository until `commit2` |
|
248 | 249 | :param commit2: Until which commit changes should be shown. |
|
249 | 250 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
250 | 251 | changes. Defaults to ``False``. |
|
251 | 252 | :param context: How many lines before/after changed lines should be |
|
252 | 253 | shown. Defaults to ``3``. |
|
253 | 254 | """ |
|
254 | 255 | self._validate_diff_commits(commit1, commit2) |
|
255 | 256 | if path1 is not None and path1 != path: |
|
256 | 257 | raise ValueError("Diff of two different paths not supported.") |
|
257 | 258 | |
|
258 | 259 | if path: |
|
259 | 260 | file_filter = [self.path, path] |
|
260 | 261 | else: |
|
261 | 262 | file_filter = None |
|
262 | 263 | |
|
263 | 264 | diff = self._remote.diff( |
|
264 | 265 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
265 | 266 | opt_git=True, opt_ignorews=ignore_whitespace, |
|
266 | 267 | context=context) |
|
267 | 268 | return MercurialDiff(diff) |
|
268 | 269 | |
|
269 | 270 | def strip(self, commit_id, branch=None): |
|
270 | 271 | self._remote.strip(commit_id, update=False, backup="none") |
|
271 | 272 | |
|
272 | 273 | self._remote.invalidate_vcs_cache() |
|
273 | 274 | self.commit_ids = self._get_all_commit_ids() |
|
274 | 275 | self._rebuild_cache(self.commit_ids) |
|
275 | 276 | |
|
276 | 277 | def verify(self): |
|
277 | 278 | verify = self._remote.verify() |
|
278 | 279 | |
|
279 | 280 | self._remote.invalidate_vcs_cache() |
|
280 | 281 | return verify |
|
281 | 282 | |
|
282 | 283 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
283 | 284 | if commit_id1 == commit_id2: |
|
284 | 285 | return commit_id1 |
|
285 | 286 | |
|
286 | 287 | ancestors = self._remote.revs_from_revspec( |
|
287 | 288 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, |
|
288 | 289 | other_path=repo2.path) |
|
289 | 290 | return repo2[ancestors[0]].raw_id if ancestors else None |
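
The cross-repo ancestor lookup above is a Mercurial revset evaluated remotely; this only shows the query string `revs_from_revspec` is handed for two hypothetical commit ids.

    revspec = "ancestor(id(%s), id(%s))" % (
        'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',
        'de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3')
    print(revspec)  # ancestor(id(a94a...), id(de9f...))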
|
290 | 291 | |
|
291 | 292 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
292 | 293 | if commit_id1 == commit_id2: |
|
293 | 294 | commits = [] |
|
294 | 295 | else: |
|
295 | 296 | if merge: |
|
296 | 297 | indexes = self._remote.revs_from_revspec( |
|
297 | 298 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", |
|
298 | 299 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) |
|
299 | 300 | else: |
|
300 | 301 | indexes = self._remote.revs_from_revspec( |
|
301 | 302 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, |
|
302 | 303 | commit_id1, other_path=repo2.path) |
|
303 | 304 | |
|
304 | 305 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) |
|
305 | 306 | for idx in indexes] |
|
306 | 307 | |
|
307 | 308 | return commits |
|
308 | 309 | |
|
309 | 310 | @staticmethod |
|
310 | 311 | def check_url(url, config): |
|
311 | 312 | """ |
|
312 | 313 | Function will check given url and try to verify if it's a valid |
|
313 | 314 | link. Sometimes it may happened that mercurial will issue basic |
|
314 | 315 | auth request that can cause whole API to hang when used from python |
|
315 | 316 | or other external calls. |
|
316 | 317 | |
|
317 | 318 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
318 | 319 | when the return code is non 200 |
|
319 | 320 | """ |
|
320 | 321 | # check first if it's not an local url |
|
321 | 322 | if os.path.isdir(url) or url.startswith('file:'): |
|
322 | 323 | return True |
|
323 | 324 | |
|
324 | 325 | # Request the _remote to verify the url |
|
325 | 326 | return connection.Hg.check_url(url, config.serialize()) |
|
326 | 327 | |
|
327 | 328 | @staticmethod |
|
328 | 329 | def is_valid_repository(path): |
|
329 | 330 | return os.path.isdir(os.path.join(path, '.hg')) |
|
330 | 331 | |
|
331 | def _init_repo(self, create, src_url=None, update_after_clone=False): | |

332 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): | |
|
332 | 333 | """ |
|
333 | 334 | Function will check for mercurial repository in given path. If there |
|
334 | 335 | is no repository in that path it will raise an exception unless |
|
335 | 336 | `create` parameter is set to True - in that case repository would |
|
336 | 337 | be created. |
|
337 | 338 | |
|
338 | 339 | If `src_url` is given, would try to clone repository from the |
|
339 | 340 | location at given clone_point. Additionally it'll make update to |
|
340 |
working copy accordingly to ` |
|
|
341 | working copy accordingly to `do_workspace_checkout` flag. | |
|
341 | 342 | """ |
|
342 | 343 | if create and os.path.exists(self.path): |
|
343 | 344 | raise RepositoryError( |
|
344 | 345 | "Cannot create repository at %s, location already exist" |
|
345 | 346 | % self.path) |
|
346 | 347 | |
|
347 | 348 | if src_url: |
|
348 | 349 | url = str(self._get_url(src_url)) |
|
349 | 350 | MercurialRepository.check_url(url, self.config) |
|
350 | 351 | |
|
351 | self._remote.clone(url, self.path, update_after_clone) | |

352 | self._remote.clone(url, self.path, do_workspace_checkout) | |
|
352 | 353 | |
|
353 | 354 | # Don't try to create if we've already cloned repo |
|
354 | 355 | create = False |
|
355 | 356 | |
|
356 | 357 | if create: |
|
357 | 358 | os.makedirs(self.path, mode=0755) |
|
358 | 359 | |
|
359 | 360 | self._remote.localrepository(create) |
|
360 | 361 | |
|
361 | 362 | @LazyProperty |
|
362 | 363 | def in_memory_commit(self): |
|
363 | 364 | return MercurialInMemoryCommit(self) |
|
364 | 365 | |
|
365 | 366 | @LazyProperty |
|
366 | 367 | def description(self): |
|
367 | 368 | description = self._remote.get_config_value( |
|
368 | 369 | 'web', 'description', untrusted=True) |
|
369 | 370 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
370 | 371 | |
|
371 | 372 | @LazyProperty |
|
372 | 373 | def contact(self): |
|
373 | 374 | contact = ( |
|
374 | 375 | self._remote.get_config_value("web", "contact") or |
|
375 | 376 | self._remote.get_config_value("ui", "username")) |
|
376 | 377 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
377 | 378 | |
|
378 | 379 | @LazyProperty |
|
379 | 380 | def last_change(self): |
|
380 | 381 | """ |
|
381 | 382 | Returns last change made on this repository as |
|
382 | 383 | `datetime.datetime` object. |
|
383 | 384 | """ |
|
384 | 385 | try: |
|
385 | 386 | return self.get_commit().date |
|
386 | 387 | except RepositoryError: |
|
387 | 388 | tzoffset = makedate()[1] |
|
388 | 389 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
389 | 390 | |
|
390 | 391 | def _get_fs_mtime(self): |
|
391 | 392 | # fallback to filesystem |
|
392 | 393 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
393 | 394 | st_path = os.path.join(self.path, '.hg', "store") |
|
394 | 395 | if os.path.exists(cl_path): |
|
395 | 396 | return os.stat(cl_path).st_mtime |
|
396 | 397 | else: |
|
397 | 398 | return os.stat(st_path).st_mtime |
|
398 | 399 | |
|
399 | 400 | def _get_url(self, url): |
|
400 | 401 | """ |
|
401 | 402 | Returns normalized url. If schema is not given, would fall |
|
402 | 403 | to filesystem |
|
403 | 404 | (``file:///``) schema. |
|
404 | 405 | """ |
|
405 | 406 | url = url.encode('utf8') |
|
406 | 407 | if url != 'default' and '://' not in url: |
|
407 | 408 | url = "file:" + urllib.pathname2url(url) |
|
408 | 409 | return url |
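
URL normalization as in `_get_url` above, sketched as a standalone function (Python 2 `urllib`, matching this module): local POSIX paths gain a `file:` prefix via `pathname2url`, while URLs that already carry a scheme, and the literal 'default', pass through unchanged.

    import urllib

    def normalize(url):  # standalone copy of the logic above
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url

    assert normalize('/srv/repos/foo') == 'file:/srv/repos/foo'
    assert normalize('https://code.example.com/foo') == 'https://code.example.com/foo'
    assert normalize('default') == 'default'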
|
409 | 410 | |
|
410 | 411 | def get_hook_location(self): |
|
411 | 412 | """ |
|
412 | 413 | returns absolute path to location where hooks are stored |
|
413 | 414 | """ |
|
414 | 415 | return os.path.join(self.path, '.hg', '.hgrc') |
|
415 | 416 | |
|
416 | 417 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
417 | 418 | """ |
|
418 | 419 | Returns ``MercurialCommit`` object representing repository's |
|
419 | 420 | commit at the given `commit_id` or `commit_idx`. |
|
420 | 421 | """ |
|
421 | 422 | if self.is_empty(): |
|
422 | 423 | raise EmptyRepositoryError("There are no commits yet") |
|
423 | 424 | |
|
424 | 425 | if commit_id is not None: |
|
425 | 426 | self._validate_commit_id(commit_id) |
|
426 | 427 | try: |
|
427 | 428 | idx = self._commit_ids[commit_id] |
|
428 | 429 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) |
|
429 | 430 | except KeyError: |
|
430 | 431 | pass |
|
431 | 432 | elif commit_idx is not None: |
|
432 | 433 | self._validate_commit_idx(commit_idx) |
|
433 | 434 | try: |
|
434 | 435 | id_ = self.commit_ids[commit_idx] |
|
435 | 436 | if commit_idx < 0: |
|
436 | 437 | commit_idx += len(self.commit_ids) |
|
437 | 438 | return MercurialCommit( |
|
438 | 439 | self, id_, commit_idx, pre_load=pre_load) |
|
439 | 440 | except IndexError: |
|
440 | 441 | commit_id = commit_idx |
|
441 | 442 | else: |
|
442 | 443 | commit_id = "tip" |
|
443 | 444 | |
|
444 | 445 | if isinstance(commit_id, unicode): |
|
445 | 446 | commit_id = safe_str(commit_id) |
|
446 | 447 | |
|
447 | 448 | try: |
|
448 | 449 | raw_id, idx = self._remote.lookup(commit_id, both=True) |
|
449 | 450 | except CommitDoesNotExistError: |
|
450 | 451 | msg = "Commit %s does not exist for %s" % ( |
|
451 | 452 | commit_id, self) |
|
452 | 453 | raise CommitDoesNotExistError(msg) |
|
453 | 454 | |
|
454 | 455 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) |
|
455 | 456 | |
|
456 | 457 | def get_commits( |
|
457 | 458 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
458 | 459 | branch_name=None, show_hidden=False, pre_load=None): |
|
459 | 460 | """ |
|
460 | 461 | Returns generator of ``MercurialCommit`` objects from start to end |
|
461 | 462 | (both are inclusive) |
|
462 | 463 | |
|
463 | 464 | :param start_id: None, str(commit_id) |
|
464 | 465 | :param end_id: None, str(commit_id) |
|
465 | 466 | :param start_date: if specified, commits with commit date less than |
|
466 | 467 | ``start_date`` would be filtered out from returned set |
|
467 | 468 | :param end_date: if specified, commits with commit date greater than |
|
468 | 469 | ``end_date`` would be filtered out from returned set |
|
469 | 470 | :param branch_name: if specified, commits not reachable from given |
|
470 | 471 | branch would be filtered out from returned set |
|
471 | 472 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
472 | 473 | Mercurial evolve |
|
473 | 474 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
474 | 475 | exist. |
|
475 | 476 | :raise CommitDoesNotExistError: If commit for given ``start`` or |
|
476 | 477 | ``end`` could not be found. |
|
477 | 478 | """ |
|
478 | 479 | # actually we should check now if it's not an empty repo |
|
479 | 480 | branch_ancestors = False |
|
480 | 481 | if self.is_empty(): |
|
481 | 482 | raise EmptyRepositoryError("There are no commits yet") |
|
482 | 483 | self._validate_branch_name(branch_name) |
|
483 | 484 | |
|
484 | 485 | if start_id is not None: |
|
485 | 486 | self._validate_commit_id(start_id) |
|
486 | 487 | c_start = self.get_commit(commit_id=start_id) |
|
487 | 488 | start_pos = self._commit_ids[c_start.raw_id] |
|
488 | 489 | else: |
|
489 | 490 | start_pos = None |
|
490 | 491 | |
|
491 | 492 | if end_id is not None: |
|
492 | 493 | self._validate_commit_id(end_id) |
|
493 | 494 | c_end = self.get_commit(commit_id=end_id) |
|
494 | 495 | end_pos = max(0, self._commit_ids[c_end.raw_id]) |
|
495 | 496 | else: |
|
496 | 497 | end_pos = None |
|
497 | 498 | |
|
498 | 499 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
499 | 500 | raise RepositoryError( |
|
500 | 501 | "Start commit '%s' cannot be after end commit '%s'" % |
|
501 | 502 | (start_id, end_id)) |
|
502 | 503 | |
|
503 | 504 | if end_pos is not None: |
|
504 | 505 | end_pos += 1 |
|
505 | 506 | |
|
506 | 507 | commit_filter = [] |
|
507 | 508 | |
|
508 | 509 | if branch_name and not branch_ancestors: |
|
509 | 510 | commit_filter.append('branch("%s")' % (branch_name,)) |
|
510 | 511 | elif branch_name and branch_ancestors: |
|
511 | 512 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) |
|
512 | 513 | |
|
513 | 514 | if start_date and not end_date: |
|
514 | 515 | commit_filter.append('date(">%s")' % (start_date,)) |
|
515 | 516 | if end_date and not start_date: |
|
516 | 517 | commit_filter.append('date("<%s")' % (end_date,)) |
|
517 | 518 | if start_date and end_date: |
|
518 | 519 | commit_filter.append( |
|
519 | 520 | 'date(">%s") and date("<%s")' % (start_date, end_date)) |
|
520 | 521 | |
|
521 | 522 | if not show_hidden: |
|
522 | 523 | commit_filter.append('not obsolete()') |
|
523 | 524 | commit_filter.append('not hidden()') |
|
524 | 525 | |
|
525 | 526 | # TODO: johbo: Figure out a simpler way for this solution |
|
526 | 527 | collection_generator = CollectionGenerator |
|
527 | 528 | if commit_filter: |
|
528 | 529 | commit_filter = ' and '.join(map(safe_str, commit_filter)) |
|
529 | 530 | revisions = self._remote.rev_range([commit_filter]) |
|
530 | 531 | collection_generator = MercurialIndexBasedCollectionGenerator |
|
531 | 532 | else: |
|
532 | 533 | revisions = self.commit_ids |
|
533 | 534 | |
|
534 | 535 | if start_pos or end_pos: |
|
535 | 536 | revisions = revisions[start_pos:end_pos] |
|
536 | 537 | |
|
537 | 538 | return collection_generator(self, revisions, pre_load=pre_load) |
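
Assembly of the Mercurial revset used by `get_commits` above, with hypothetical inputs: one branch, a lower date bound, and hidden/obsolete commits excluded; the fragments are joined with ' and ' exactly as in the method.

    branch_name = 'default'
    start_date, end_date = '2018-01-01', None  # hypothetical bounds
    show_hidden = False

    commit_filter = []
    if branch_name:
        commit_filter.append('branch("%s")' % (branch_name,))
    if start_date and not end_date:
        commit_filter.append('date(">%s")' % (start_date,))
    if not show_hidden:
        commit_filter.append('not obsolete()')
        commit_filter.append('not hidden()')

    revset = ' and '.join(commit_filter)
    print(revset)
    # branch("default") and date(">2018-01-01") and not obsolete() and not hidden()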
|
538 | 539 | |
|
539 | 540 | def pull(self, url, commit_ids=None): |
|
540 | 541 | """ |
|
541 | Tries to pull changes from external location. | |

542 | Pull changes from external location. | |
|
542 | 543 | |
|
543 | 544 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
544 | 545 | which shall be pulled from the other repository. |
|
545 | 546 | """ |
|
546 | 547 | url = self._get_url(url) |
|
547 | 548 | self._remote.pull(url, commit_ids=commit_ids) |
|
548 | 549 | self._remote.invalidate_vcs_cache() |
|
549 | 550 | |
|
551 | def fetch(self, url, commit_ids=None): | |
|
552 | """ | |
|
553 | Backward compatibility with GIT: fetch == pull | |
|
554 | """ | |
|
555 | return self.pull(url, commit_ids=commit_ids) | |
|
556 | ||
|
550 | 557 | def push(self, url): |
|
551 | 558 | url = self._get_url(url) |
|
552 | 559 | self._remote.sync_push(url) |
|
553 | 560 | |
|
554 | 561 | def _local_clone(self, clone_path): |
|
555 | 562 | """ |
|
556 | 563 | Create a local clone of the current repo. |
|
557 | 564 | """ |
|
558 | 565 | self._remote.clone(self.path, clone_path, update_after_clone=True, |
|
559 | 566 | hooks=False) |
|
560 | 567 | |
|
561 | 568 | def _update(self, revision, clean=False): |
|
562 | 569 | """ |
|
563 | 570 | Update the working copy to the specified revision. |
|
564 | 571 | """ |
|
565 | 572 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) |
|
566 | 573 | self._remote.update(revision, clean=clean) |
|
567 | 574 | |
|
568 | 575 | def _identify(self): |
|
569 | 576 | """ |
|
570 | 577 | Return the current state of the working directory. |
|
571 | 578 | """ |
|
572 | 579 | return self._remote.identify().strip().rstrip('+') |
|
573 | 580 | |
|
574 | 581 | def _heads(self, branch=None): |
|
575 | 582 | """ |
|
576 | 583 | Return the commit ids of the repository heads. |
|
577 | 584 | """ |
|
578 | 585 | return self._remote.heads(branch=branch).strip().split(' ') |
|
579 | 586 | |
|
580 | 587 | def _ancestor(self, revision1, revision2): |
|
581 | 588 | """ |
|
582 | 589 | Return the common ancestor of the two revisions. |
|
583 | 590 | """ |
|
584 | 591 | return self._remote.ancestor(revision1, revision2) |
|
585 | 592 | |
|
586 | 593 | def _local_push( |
|
587 | 594 | self, revision, repository_path, push_branches=False, |
|
588 | 595 | enable_hooks=False): |
|
589 | 596 | """ |
|
590 | 597 | Push the given revision to the specified repository. |
|
591 | 598 | |
|
592 | 599 | :param push_branches: allow to create branches in the target repo. |
|
593 | 600 | """ |
|
594 | 601 | self._remote.push( |
|
595 | 602 | [revision], repository_path, hooks=enable_hooks, |
|
596 | 603 | push_branches=push_branches) |
|
597 | 604 | |
|
598 | 605 | def _local_merge(self, target_ref, merge_message, user_name, user_email, |
|
599 | 606 | source_ref, use_rebase=False, dry_run=False): |
|
600 | 607 | """ |
|
601 | 608 | Merge the given source_revision into the checked out revision. |
|
602 | 609 | |
|
603 | 610 | Returns the commit id of the merge and a boolean indicating if the |
|
604 | 611 | commit needs to be pushed. |
|
605 | 612 | """ |
|
606 | 613 | self._update(target_ref.commit_id) |
|
607 | 614 | |
|
608 | 615 | ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id) |
|
609 | 616 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) |
|
610 | 617 | |
|
611 | 618 | if ancestor == source_ref.commit_id: |
|
612 | 619 | # Nothing to do, the changes were already integrated |
|
613 | 620 | return target_ref.commit_id, False |
|
614 | 621 | |
|
615 | 622 | elif ancestor == target_ref.commit_id and is_the_same_branch: |
|
616 | 623 | # In this case we should force a commit message |
|
617 | 624 | return source_ref.commit_id, True |
|
618 | 625 | |
|
619 | 626 | if use_rebase: |
|
620 | 627 | try: |
|
621 | 628 | bookmark_name = 'rcbook%s%s' % (source_ref.commit_id, |
|
622 | 629 | target_ref.commit_id) |
|
623 | 630 | self.bookmark(bookmark_name, revision=source_ref.commit_id) |
|
624 | 631 | self._remote.rebase( |
|
625 | 632 | source=source_ref.commit_id, dest=target_ref.commit_id) |
|
626 | 633 | self._remote.invalidate_vcs_cache() |
|
627 | 634 | self._update(bookmark_name) |
|
628 | 635 | return self._identify(), True |
|
629 | 636 | except RepositoryError: |
|
630 | 637 | # The rebase-abort may raise another exception which 'hides' |
|
631 | 638 | # the original one, therefore we log it here. |
|
632 | 639 | log.exception('Error while rebasing shadow repo during merge.') |
|
633 | 640 | |
|
634 | 641 | # Cleanup any rebase leftovers |
|
635 | 642 | self._remote.invalidate_vcs_cache() |
|
636 | 643 | self._remote.rebase(abort=True) |
|
637 | 644 | self._remote.invalidate_vcs_cache() |
|
638 | 645 | self._remote.update(clean=True) |
|
639 | 646 | raise |
|
640 | 647 | else: |
|
641 | 648 | try: |
|
642 | 649 | self._remote.merge(source_ref.commit_id) |
|
643 | 650 | self._remote.invalidate_vcs_cache() |
|
644 | 651 | self._remote.commit( |
|
645 | 652 | message=safe_str(merge_message), |
|
646 | 653 | username=safe_str('%s <%s>' % (user_name, user_email))) |
|
647 | 654 | self._remote.invalidate_vcs_cache() |
|
648 | 655 | return self._identify(), True |
|
649 | 656 | except RepositoryError: |
|
650 | 657 | # Cleanup any merge leftovers |
|
651 | 658 | self._remote.update(clean=True) |
|
652 | 659 | raise |
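
The ancestor fast-paths at the top of `_local_merge` above, reduced to plain data: depending on where the common ancestor sits, the merge is a no-op, a fast-forward, or a real merge/rebase. The ids are hypothetical.

    def merge_action(ancestor, target_id, source_id, same_branch):
        if ancestor == source_id:
            return 'noop'           # changes already integrated
        if ancestor == target_id and same_branch:
            return 'fast-forward'   # reuse the source commit; push needed
        return 'merge-or-rebase'    # full merge (or rebase) required

    assert merge_action('a1', target_id='b2', source_id='a1', same_branch=True) == 'noop'
    assert merge_action('b2', target_id='b2', source_id='c3', same_branch=True) == 'fast-forward'
    assert merge_action('x9', target_id='b2', source_id='c3', same_branch=False) == 'merge-or-rebase'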
|
653 | 660 | |
|
654 | 661 | def _local_close(self, target_ref, user_name, user_email, |
|
655 | 662 | source_ref, close_message=''): |
|
656 | 663 | """ |
|
657 | 664 | Close the branch of the given source_revision |
|
658 | 665 | |
|
659 | 666 | Returns the commit id of the close and a boolean indicating if the |
|
660 | 667 | commit needs to be pushed. |
|
661 | 668 | """ |
|
662 | 669 | self._update(source_ref.commit_id) |
|
663 | 670 | message = close_message or "Closing branch: `{}`".format(source_ref.name) |
|
664 | 671 | try: |
|
665 | 672 | self._remote.commit( |
|
666 | 673 | message=safe_str(message), |
|
667 | 674 | username=safe_str('%s <%s>' % (user_name, user_email)), |
|
668 | 675 | close_branch=True) |
|
669 | 676 | self._remote.invalidate_vcs_cache() |
|
670 | 677 | return self._identify(), True |
|
671 | 678 | except RepositoryError: |
|
672 | 679 | # Cleanup any commit leftovers |
|
673 | 680 | self._remote.update(clean=True) |
|
674 | 681 | raise |
|
675 | 682 | |
|
676 | 683 | def _is_the_same_branch(self, target_ref, source_ref): |
|
677 | 684 | return ( |
|
678 | 685 | self._get_branch_name(target_ref) == |
|
679 | 686 | self._get_branch_name(source_ref)) |
|
680 | 687 | |
|
681 | 688 | def _get_branch_name(self, ref): |
|
682 | 689 | if ref.type == 'branch': |
|
683 | 690 | return ref.name |
|
684 | 691 | return self._remote.ctx_branch(ref.commit_id) |
|
685 | 692 | |
|
686 | 693 | def _maybe_prepare_merge_workspace( |
|
687 | 694 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): |
|
688 | 695 | shadow_repository_path = self._get_shadow_repository_path( |
|
689 | 696 | repo_id, workspace_id) |
|
690 | 697 | if not os.path.exists(shadow_repository_path): |
|
691 | 698 | self._local_clone(shadow_repository_path) |
|
692 | 699 | log.debug( |
|
693 | 700 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
694 | 701 | |
|
695 | 702 | return shadow_repository_path |
|
696 | 703 | |
|
697 | 704 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
698 | 705 | source_repo, source_ref, merge_message, |
|
699 | 706 | merger_name, merger_email, dry_run=False, |
|
700 | 707 | use_rebase=False, close_branch=False): |
|
701 | 708 | |
|
702 | 709 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
703 | 710 | 'rebase' if use_rebase else 'merge', dry_run) |
|
704 | 711 | if target_ref.commit_id not in self._heads(): |
|
705 | 712 | return MergeResponse( |
|
706 | 713 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) |
|
707 | 714 | |
|
708 | 715 | try: |
|
709 | 716 | if (target_ref.type == 'branch' and |
|
710 | 717 | len(self._heads(target_ref.name)) != 1): |
|
711 | 718 | return MergeResponse( |
|
712 | 719 | False, False, None, |
|
713 | 720 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS) |
|
714 | 721 | except CommitDoesNotExistError: |
|
715 | 722 | log.exception('Failure when looking up branch heads on hg target') |
|
716 | 723 | return MergeResponse( |
|
717 | 724 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) |
|
718 | 725 | |
|
719 | 726 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
720 | 727 | repo_id, workspace_id, target_ref, source_ref) |
|
721 | 728 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
722 | 729 | |
|
723 | 730 | log.debug('Pulling in target reference %s', target_ref) |
|
724 | 731 | self._validate_pull_reference(target_ref) |
|
725 | 732 | shadow_repo._local_pull(self.path, target_ref) |
|
726 | 733 | try: |
|
727 | 734 | log.debug('Pulling in source reference %s', source_ref) |
|
728 | 735 | source_repo._validate_pull_reference(source_ref) |
|
729 | 736 | shadow_repo._local_pull(source_repo.path, source_ref) |
|
730 | 737 | except CommitDoesNotExistError: |
|
731 | 738 | log.exception('Failure when doing local pull on hg shadow repo') |
|
732 | 739 | return MergeResponse( |
|
733 | 740 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF) |
|
734 | 741 | |
|
735 | 742 | merge_ref = None |
|
736 | 743 | merge_commit_id = None |
|
737 | 744 | close_commit_id = None |
|
738 | 745 | merge_failure_reason = MergeFailureReason.NONE |
|
739 | 746 | |
|
740 | 747 | # enforce that close branch should be used only in case we source from |
|
741 | 748 | # an actual Branch |
|
742 | 749 | close_branch = close_branch and source_ref.type == 'branch' |
|
743 | 750 | |
|
744 | 751 | # don't allow to close branch if source and target are the same |
|
745 | 752 | close_branch = close_branch and source_ref.name != target_ref.name |
|
746 | 753 | |
|
747 | 754 | needs_push_on_close = False |
|
748 | 755 | if close_branch and not use_rebase and not dry_run: |
|
749 | 756 | try: |
|
750 | 757 | close_commit_id, needs_push_on_close = shadow_repo._local_close( |
|
751 | 758 | target_ref, merger_name, merger_email, source_ref) |
|
752 | 759 | merge_possible = True |
|
753 | 760 | except RepositoryError: |
|
754 | 761 | log.exception( |
|
755 | 762 | 'Failure when doing close branch on hg shadow repo') |
|
756 | 763 | merge_possible = False |
|
757 | 764 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
758 | 765 | else: |
|
759 | 766 | merge_possible = True |
|
760 | 767 | |
|
761 | 768 | needs_push = False |
|
762 | 769 | if merge_possible: |
|
763 | 770 | try: |
|
764 | 771 | merge_commit_id, needs_push = shadow_repo._local_merge( |
|
765 | 772 | target_ref, merge_message, merger_name, merger_email, |
|
766 | 773 | source_ref, use_rebase=use_rebase, dry_run=dry_run) |
|
767 | 774 | merge_possible = True |
|
768 | 775 | |
|
769 | 776 | # read the state of the close action; it |

770 | 777 | # may have required a push |
|
771 | 778 | needs_push = needs_push or needs_push_on_close |
|
772 | 779 | |
|
773 | 780 | # Set a bookmark pointing to the merge commit. This bookmark |
|
774 | 781 | # may be used to easily identify the last successful merge |
|
775 | 782 | # commit in the shadow repository. |
|
776 | 783 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) |
|
777 | 784 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) |
|
778 | 785 | except SubrepoMergeError: |
|
779 | 786 | log.exception( |
|
780 | 787 | 'Subrepo merge error during local merge on hg shadow repo.') |
|
781 | 788 | merge_possible = False |
|
782 | 789 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED |
|
783 | 790 | needs_push = False |
|
784 | 791 | except RepositoryError: |
|
785 | 792 | log.exception('Failure when doing local merge on hg shadow repo') |
|
786 | 793 | merge_possible = False |
|
787 | 794 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
788 | 795 | needs_push = False |
|
789 | 796 | |
|
790 | 797 | if merge_possible and not dry_run: |
|
791 | 798 | if needs_push: |
|
792 | 799 | # In case the target is a bookmark, update it, so after pushing |
|
793 | 800 | # the bookmark is also updated in the target. |
|
794 | 801 | if target_ref.type == 'book': |
|
795 | 802 | shadow_repo.bookmark( |
|
796 | 803 | target_ref.name, revision=merge_commit_id) |
|
797 | 804 | try: |
|
798 | 805 | shadow_repo_with_hooks = self._get_shadow_instance( |
|
799 | 806 | shadow_repository_path, |
|
800 | 807 | enable_hooks=True) |
|
801 | 808 | # This is the actual merge action, we push from shadow |
|
802 | 809 | # into origin. |
|
803 | 810 | # Note: the push_branches option will push any new branch |
|
804 | 811 | # defined in the source repository to the target. This may |
|
805 | 812 | # be dangerous as branches are permanent in Mercurial. |
|
806 | 813 | # This feature was requested in issue #441. |
|
807 | 814 | shadow_repo_with_hooks._local_push( |
|
808 | 815 | merge_commit_id, self.path, push_branches=True, |
|
809 | 816 | enable_hooks=True) |
|
810 | 817 | |
|
811 | 818 | # maybe we also need to push the close_commit_id |
|
812 | 819 | if close_commit_id: |
|
813 | 820 | shadow_repo_with_hooks._local_push( |
|
814 | 821 | close_commit_id, self.path, push_branches=True, |
|
815 | 822 | enable_hooks=True) |
|
816 | 823 | merge_succeeded = True |
|
817 | 824 | except RepositoryError: |
|
818 | 825 | log.exception( |
|
819 | 826 | 'Failure when doing local push from the shadow ' |
|
820 | 827 | 'repository to the target repository.') |
|
821 | 828 | merge_succeeded = False |
|
822 | 829 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
823 | 830 | else: |
|
824 | 831 | merge_succeeded = True |
|
825 | 832 | else: |
|
826 | 833 | merge_succeeded = False |
|
827 | 834 | |
|
828 | 835 | return MergeResponse( |
|
829 | 836 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason) |
|
830 | 837 | |
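How a caller might drive the merge API above is worth sketching: a dry run first to validate heads and trial-merge in the shadow repository, then the real run which also pushes. This is a hedged sketch, not the actual RhodeCode call site; repo_id, workspace_id and the two reference tuples are placeholders, and the MergeResponse attribute names (possible, executed) are assumptions inferred from the positional arguments above.

    # Sketch only: attribute names on the response are assumed from the
    # MergeResponse constructor order used in _merge_repo above.
    def try_merge(target_vcs, source_vcs, repo_id, workspace_id,
                  target_ref, source_ref):
        common = dict(
            merge_message=u'Merge source into target',
            merger_name='Jane Doe', merger_email='jane@example.com')
        # dry run: validates heads and performs a trial local merge
        resp = target_vcs._merge_repo(
            repo_id, workspace_id, target_ref, source_vcs, source_ref,
            dry_run=True, **common)
        if not resp.possible:
            return resp
        # real run: repeats the merge and pushes from shadow to origin
        return target_vcs._merge_repo(
            repo_id, workspace_id, target_ref, source_vcs, source_ref,
            dry_run=False, **common)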
|
831 | 838 | def _get_shadow_instance( |
|
832 | 839 | self, shadow_repository_path, enable_hooks=False): |
|
833 | 840 | config = self.config.copy() |
|
834 | 841 | if not enable_hooks: |
|
835 | 842 | config.clear_section('hooks') |
|
836 | 843 | return MercurialRepository(shadow_repository_path, config) |
|
837 | 844 | |
|
838 | 845 | def _validate_pull_reference(self, reference): |
|
839 | 846 | if not (reference.name in self.bookmarks or |
|
840 | 847 | reference.name in self.branches or |
|
841 | 848 | self.get_commit(reference.commit_id)): |
|
842 | 849 | raise CommitDoesNotExistError( |
|
843 | 850 | 'Unknown branch, bookmark or commit id') |
|
844 | 851 | |
|
845 | 852 | def _local_pull(self, repository_path, reference): |
|
846 | 853 | """ |
|
847 | 854 | Fetch a branch, bookmark or commit from a local repository. |
|
848 | 855 | """ |
|
849 | 856 | repository_path = os.path.abspath(repository_path) |
|
850 | 857 | if repository_path == self.path: |
|
851 | 858 | raise ValueError('Cannot pull from the same repository') |
|
852 | 859 | |
|
853 | 860 | reference_type_to_option_name = { |
|
854 | 861 | 'book': 'bookmark', |
|
855 | 862 | 'branch': 'branch', |
|
856 | 863 | } |
|
857 | 864 | option_name = reference_type_to_option_name.get( |
|
858 | 865 | reference.type, 'revision') |
|
859 | 866 | |
|
860 | 867 | if option_name == 'revision': |
|
861 | 868 | ref = reference.commit_id |
|
862 | 869 | else: |
|
863 | 870 | ref = reference.name |
|
864 | 871 | |
|
865 | 872 | options = {option_name: [ref]} |
|
866 | 873 | self._remote.pull_cmd(repository_path, hooks=False, **options) |
|
867 | 874 | self._remote.invalidate_vcs_cache() |
|
868 | 875 | |
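The reference-type dispatch in _local_pull is small enough to check in isolation. A runnable sketch, assuming Reference is the (type, name, commit_id) namedtuple this backend uses:

    import collections

    # assumed shape of the vcs Reference tuple used above
    Reference = collections.namedtuple(
        'Reference', ('type', 'name', 'commit_id'))

    def pull_options(reference):
        # 'book' pulls by bookmark, 'branch' by branch name; any other
        # reference type falls back to pulling the exact revision.
        option_name = {'book': 'bookmark', 'branch': 'branch'}.get(
            reference.type, 'revision')
        ref = (reference.commit_id if option_name == 'revision'
               else reference.name)
        return {option_name: [ref]}

    print(pull_options(Reference('branch', 'default', 'abc123')))
    # {'branch': ['default']}
    print(pull_options(Reference('tag', 'v1.0', 'abc123')))
    # {'revision': ['abc123']}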
|
869 | 876 | def bookmark(self, bookmark, revision=None): |
|
870 | 877 | if isinstance(bookmark, unicode): |
|
871 | 878 | bookmark = safe_str(bookmark) |
|
872 | 879 | self._remote.bookmark(bookmark, revision=revision) |
|
873 | 880 | self._remote.invalidate_vcs_cache() |
|
874 | 881 | |
|
875 | 882 | def get_path_permissions(self, username): |
|
876 | 883 | hgacl_file = os.path.join(self.path, '.hg/hgacl') |
|
877 | 884 | |
|
878 | 885 | def read_patterns(suffix): |
|
879 | 886 | svalue = None |
|
880 | 887 | try: |
|
881 | 888 | svalue = hgacl.get('narrowhgacl', username + suffix) |
|
882 | 889 | except configparser.NoOptionError: |
|
883 | 890 | try: |
|
884 | 891 | svalue = hgacl.get('narrowhgacl', 'default' + suffix) |
|
885 | 892 | except configparser.NoOptionError: |
|
886 | 893 | pass |
|
887 | 894 | if not svalue: |
|
888 | 895 | return None |
|
889 | 896 | result = ['/'] |
|
890 | 897 | for pattern in svalue.split(): |
|
891 | 898 | result.append(pattern) |
|
892 | 899 | if '*' not in pattern and '?' not in pattern: |
|
893 | 900 | result.append(pattern + '/*') |
|
894 | 901 | return result |
|
895 | 902 | |
|
896 | 903 | if os.path.exists(hgacl_file): |
|
897 | 904 | try: |
|
898 | 905 | hgacl = configparser.RawConfigParser() |
|
899 | 906 | hgacl.read(hgacl_file) |
|
900 | 907 | |
|
901 | 908 | includes = read_patterns('.includes') |
|
902 | 909 | excludes = read_patterns('.excludes') |
|
903 | 910 | return BasePathPermissionChecker.create_from_patterns( |
|
904 | 911 | includes, excludes) |
|
905 | 912 | except BaseException as e: |
|
906 | 913 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( |
|
907 | 914 | hgacl_file, self.name, e) |
|
908 | 915 | raise exceptions.RepositoryRequirementError(msg) |
|
909 | 916 | else: |
|
910 | 917 | return None |
|
911 | 918 | |
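From the way read_patterns queries the parser, a .hg/hgacl file presumably looks like the sketch below; the section and key layout are inferred from the code above, not from documentation. Note that a pattern without wildcards also matches its subtree, because the loop appends pattern + '/*' for it.

    # Hypothetical .hg/hgacl content, inferred from read_patterns():
    # per-user '<username>.includes'/'<username>.excludes' keys with a
    # 'default' fallback, holding whitespace-separated path patterns.
    HGACL = ('[narrowhgacl]\n'
             'jane.includes = docs src/module-a\n'
             'default.excludes = secrets\n')

    import ConfigParser  # Python 2, matching the surrounding code
    import StringIO

    hgacl = ConfigParser.RawConfigParser()
    hgacl.readfp(StringIO.StringIO(HGACL))
    print(hgacl.get('narrowhgacl', 'jane.includes'))     # docs src/module-a
    print(hgacl.get('narrowhgacl', 'default.excludes'))  # secrets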
|
912 | 919 | |
|
913 | 920 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): |
|
914 | 921 | |
|
915 | 922 | def _commit_factory(self, commit_id): |
|
916 | 923 | return self.repo.get_commit( |
|
917 | 924 | commit_idx=commit_id, pre_load=self.pre_load) |
@@ -1,343 +1,343 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | SVN repository module |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import date_astimestamp |
|
33 | 33 | from rhodecode.lib.utils import safe_str, safe_unicode |
|
34 | 34 | from rhodecode.lib.vcs import connection, path as vcspath |
|
35 | 35 | from rhodecode.lib.vcs.backends import base |
|
36 | 36 | from rhodecode.lib.vcs.backends.svn.commit import ( |
|
37 | 37 | SubversionCommit, _date_from_svn_properties) |
|
38 | 38 | from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff |
|
39 | 39 | from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit |
|
40 | 40 | from rhodecode.lib.vcs.conf import settings |
|
41 | 41 | from rhodecode.lib.vcs.exceptions import ( |
|
42 | 42 | CommitDoesNotExistError, EmptyRepositoryError, RepositoryError, |
|
43 | 43 | VCSError, NodeDoesNotExistError) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class SubversionRepository(base.BaseRepository): |
|
50 | 50 | """ |
|
51 | 51 | Subversion backend implementation |
|
52 | 52 | |
|
53 | 53 | .. important:: |
|
54 | 54 | |
|
55 | 55 | It is very important to distinguish between the commit index and the |

56 | 56 | commit id which is assigned by Subversion. The former is always |

57 | 57 | handled as an `int` by this implementation; the commit id assigned |

58 | 58 | by Subversion, on the other hand, will always be a `str`. |
|
59 | 59 | |
|
60 | 60 | There is a specific trap since the first commit will have the index |
|
61 | 61 | ``0`` but the svn id will be ``"1"``. |
|
62 | 62 | |
|
63 | 63 | """ |
|
64 | 64 | |
|
65 | 65 | # Note: Subversion does not really have a default branch name. |
|
66 | 66 | DEFAULT_BRANCH_NAME = None |
|
67 | 67 | |
|
68 | 68 | contact = base.BaseRepository.DEFAULT_CONTACT |
|
69 | 69 | description = base.BaseRepository.DEFAULT_DESCRIPTION |
|
70 | 70 | |
|
71 | def __init__(self, repo_path, config=None, create=False, src_url=None, | |
|
71 | def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False, | |
|
72 | 72 | **kwargs): |
|
73 | 73 | self.path = safe_str(os.path.abspath(repo_path)) |
|
74 | 74 | self.config = config if config else self.get_default_config() |
|
75 | 75 | |
|
76 | 76 | self._init_repo(create, src_url) |
|
77 | 77 | |
|
78 | 78 | @LazyProperty |
|
79 | 79 | def _remote(self): |
|
80 | 80 | return connection.Svn(self.path, self.config) |
|
81 | 81 | |
|
82 | 82 | def _init_repo(self, create, src_url): |
|
83 | 83 | if create and os.path.exists(self.path): |
|
84 | 84 | raise RepositoryError( |
|
85 | 85 | "Cannot create repository at %s, location already exists" |
|
86 | 86 | % self.path) |
|
87 | 87 | |
|
88 | 88 | if create: |
|
89 | 89 | self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION) |
|
90 | 90 | if src_url: |
|
91 | 91 | src_url = _sanitize_url(src_url) |
|
92 | 92 | self._remote.import_remote_repository(src_url) |
|
93 | 93 | else: |
|
94 | 94 | self._check_path() |
|
95 | 95 | |
|
96 | 96 | @LazyProperty |
|
97 | 97 | def commit_ids(self): |
|
98 | 98 | head = self._remote.lookup(None) |
|
99 | 99 | return [str(r) for r in xrange(1, head + 1)] |
|
100 | 100 | |
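The index-versus-id trap from the class docstring shows up directly in how commit_ids is built; a tiny runnable check:

    # commit_ids is ['1', '2', ..., str(head)]: the list index (int) is
    # always one less than the Subversion revision id (str).
    head = 3
    commit_ids = [str(r) for r in range(1, head + 1)]
    assert commit_ids[0] == '1'        # commit index 0 -> svn id "1"
    assert commit_ids.index('3') == 2  # svn id "3" -> commit index 2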
|
101 | 101 | @LazyProperty |
|
102 | 102 | def branches(self): |
|
103 | 103 | return self._tags_or_branches('vcs_svn_branch') |
|
104 | 104 | |
|
105 | 105 | @LazyProperty |
|
106 | 106 | def branches_closed(self): |
|
107 | 107 | return {} |
|
108 | 108 | |
|
109 | 109 | @LazyProperty |
|
110 | 110 | def bookmarks(self): |
|
111 | 111 | return {} |
|
112 | 112 | |
|
113 | 113 | @LazyProperty |
|
114 | 114 | def branches_all(self): |
|
115 | 115 | # TODO: johbo: Implement proper branch support |
|
116 | 116 | all_branches = {} |
|
117 | 117 | all_branches.update(self.branches) |
|
118 | 118 | all_branches.update(self.branches_closed) |
|
119 | 119 | return all_branches |
|
120 | 120 | |
|
121 | 121 | @LazyProperty |
|
122 | 122 | def tags(self): |
|
123 | 123 | return self._tags_or_branches('vcs_svn_tag') |
|
124 | 124 | |
|
125 | 125 | def _tags_or_branches(self, config_section): |
|
126 | 126 | found_items = {} |
|
127 | 127 | |
|
128 | 128 | if self.is_empty(): |
|
129 | 129 | return {} |
|
130 | 130 | |
|
131 | 131 | for pattern in self._patterns_from_section(config_section): |
|
132 | 132 | pattern = vcspath.sanitize(pattern) |
|
133 | 133 | tip = self.get_commit() |
|
134 | 134 | try: |
|
135 | 135 | if pattern.endswith('*'): |
|
136 | 136 | basedir = tip.get_node(vcspath.dirname(pattern)) |
|
137 | 137 | directories = basedir.dirs |
|
138 | 138 | else: |
|
139 | 139 | directories = (tip.get_node(pattern), ) |
|
140 | 140 | except NodeDoesNotExistError: |
|
141 | 141 | continue |
|
142 | 142 | found_items.update( |
|
143 | 143 | (safe_unicode(n.path), |
|
144 | 144 | self.commit_ids[-1]) |
|
145 | 145 | for n in directories) |
|
146 | 146 | |
|
147 | 147 | def get_name(item): |
|
148 | 148 | return item[0] |
|
149 | 149 | |
|
150 | 150 | return OrderedDict(sorted(found_items.items(), key=get_name)) |
|
151 | 151 | |
|
152 | 152 | def _patterns_from_section(self, section): |
|
153 | 153 | return (pattern for key, pattern in self.config.items(section)) |
|
154 | 154 | |
|
155 | 155 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
156 | 156 | if self != repo2: |
|
157 | 157 | raise ValueError( |
|
158 | 158 | "Subversion does not support getting common ancestor of" |
|
159 | 159 | " different repositories.") |
|
160 | 160 | |
|
161 | 161 | if int(commit_id1) < int(commit_id2): |
|
162 | 162 | return commit_id1 |
|
163 | 163 | return commit_id2 |
|
164 | 164 | |
|
165 | 165 | def verify(self): |
|
166 | 166 | verify = self._remote.verify() |
|
167 | 167 | |
|
168 | 168 | self._remote.invalidate_vcs_cache() |
|
169 | 169 | return verify |
|
170 | 170 | |
|
171 | 171 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
172 | 172 | # TODO: johbo: Implement better comparison, this is a very naive |
|
173 | 173 | # version which does not allow to compare branches, tags or folders |
|
174 | 174 | # at all. |
|
175 | 175 | if repo2 != self: |
|
176 | 176 | raise ValueError( |
|
177 | 177 | "Subversion does not support comparison of different " |
|
178 | 178 | "repositories.") |
|
179 | 179 | |
|
180 | 180 | if commit_id1 == commit_id2: |
|
181 | 181 | return [] |
|
182 | 182 | |
|
183 | 183 | commit_idx1 = self._get_commit_idx(commit_id1) |
|
184 | 184 | commit_idx2 = self._get_commit_idx(commit_id2) |
|
185 | 185 | |
|
186 | 186 | commits = [ |
|
187 | 187 | self.get_commit(commit_idx=idx) |
|
188 | 188 | for idx in range(commit_idx1 + 1, commit_idx2 + 1)] |
|
189 | 189 | |
|
190 | 190 | return commits |
|
191 | 191 | |
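The slice arithmetic in compare is easy to misread because of that same off-by-one; a worked check of the range it yields:

    # Comparing svn revisions '2' and '5' (indices 1 and 4) yields the
    # commits at indices 2..4, i.e. revisions 3, 4 and 5 -- everything
    # commit_id2 has on top of commit_id1.
    commit_idx1, commit_idx2 = 1, 4
    print(range(commit_idx1 + 1, commit_idx2 + 1))  # [2, 3, 4]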
|
192 | 192 | def _get_commit_idx(self, commit_id): |
|
193 | 193 | try: |
|
194 | 194 | svn_rev = int(commit_id) |
|
195 | 195 | except (TypeError, ValueError): |
|
196 | 196 | # TODO: johbo: this might be only one case, HEAD, check this |
|
197 | 197 | svn_rev = self._remote.lookup(commit_id) |
|
198 | 198 | commit_idx = svn_rev - 1 |
|
199 | 199 | if commit_idx >= len(self.commit_ids): |
|
200 | 200 | raise CommitDoesNotExistError( |
|
201 | 201 | "Commit at index %s does not exist." % (commit_idx, )) |
|
202 | 202 | return commit_idx |
|
203 | 203 | |
|
204 | 204 | @staticmethod |
|
205 | 205 | def check_url(url, config): |
|
206 | 206 | """ |
|
207 | 207 | Check if `url` is a valid source to import a Subversion repository. |
|
208 | 208 | """ |
|
209 | 209 | # convert to URL if it's a local directory |
|
210 | 210 | if os.path.isdir(url): |
|
211 | 211 | url = 'file://' + urllib.pathname2url(url) |
|
212 | 212 | return connection.Svn.check_url(url, config.serialize()) |
|
213 | 213 | |
|
214 | 214 | @staticmethod |
|
215 | 215 | def is_valid_repository(path): |
|
216 | 216 | try: |
|
217 | 217 | SubversionRepository(path) |
|
218 | 218 | return True |
|
219 | 219 | except VCSError: |
|
220 | 220 | pass |
|
221 | 221 | return False |
|
222 | 222 | |
|
223 | 223 | def _check_path(self): |
|
224 | 224 | if not os.path.exists(self.path): |
|
225 | 225 | raise VCSError('Path "%s" does not exist!' % (self.path, )) |
|
226 | 226 | if not self._remote.is_path_valid_repository(self.path): |
|
227 | 227 | raise VCSError( |
|
228 | 228 | 'Path "%s" does not contain a Subversion repository' % |
|
229 | 229 | (self.path, )) |
|
230 | 230 | |
|
231 | 231 | @LazyProperty |
|
232 | 232 | def last_change(self): |
|
233 | 233 | """ |
|
234 | 234 | Returns last change made on this repository as |
|
235 | 235 | `datetime.datetime` object. |
|
236 | 236 | """ |
|
237 | 237 | # Subversion always has a first commit which has id "0" and contains |
|
238 | 238 | # what we are looking for. |
|
239 | 239 | last_id = len(self.commit_ids) |
|
240 | 240 | properties = self._remote.revision_properties(last_id) |
|
241 | 241 | return _date_from_svn_properties(properties) |
|
242 | 242 | |
|
243 | 243 | @LazyProperty |
|
244 | 244 | def in_memory_commit(self): |
|
245 | 245 | return SubversionInMemoryCommit(self) |
|
246 | 246 | |
|
247 | 247 | def get_hook_location(self): |
|
248 | 248 | """ |
|
249 | 249 | returns absolute path to location where hooks are stored |
|
250 | 250 | """ |
|
251 | 251 | return os.path.join(self.path, 'hooks') |
|
252 | 252 | |
|
253 | 253 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
254 | 254 | if self.is_empty(): |
|
255 | 255 | raise EmptyRepositoryError("There are no commits yet") |
|
256 | 256 | if commit_id is not None: |
|
257 | 257 | self._validate_commit_id(commit_id) |
|
258 | 258 | elif commit_idx is not None: |
|
259 | 259 | self._validate_commit_idx(commit_idx) |
|
260 | 260 | try: |
|
261 | 261 | commit_id = self.commit_ids[commit_idx] |
|
262 | 262 | except IndexError: |
|
263 | 263 | raise CommitDoesNotExistError |
|
264 | 264 | |
|
265 | 265 | commit_id = self._sanitize_commit_id(commit_id) |
|
266 | 266 | commit = SubversionCommit(repository=self, commit_id=commit_id) |
|
267 | 267 | return commit |
|
268 | 268 | |
|
269 | 269 | def get_commits( |
|
270 | 270 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
271 | 271 | branch_name=None, show_hidden=False, pre_load=None): |
|
272 | 272 | if self.is_empty(): |
|
273 | 273 | raise EmptyRepositoryError("There are no commit_ids yet") |
|
274 | 274 | self._validate_branch_name(branch_name) |
|
275 | 275 | |
|
276 | 276 | if start_id is not None: |
|
277 | 277 | self._validate_commit_id(start_id) |
|
278 | 278 | if end_id is not None: |
|
279 | 279 | self._validate_commit_id(end_id) |
|
280 | 280 | |
|
281 | 281 | start_raw_id = self._sanitize_commit_id(start_id) |
|
282 | 282 | start_pos = self.commit_ids.index(start_raw_id) if start_id else None |
|
283 | 283 | end_raw_id = self._sanitize_commit_id(end_id) |
|
284 | 284 | end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None |
|
285 | 285 | |
|
286 | 286 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
287 | 287 | raise RepositoryError( |
|
288 | 288 | "Start commit '%s' cannot be after end commit '%s'" % |
|
289 | 289 | (start_id, end_id)) |
|
290 | 290 | if end_pos is not None: |
|
291 | 291 | end_pos += 1 |
|
292 | 292 | |
|
293 | 293 | # Date based filtering |
|
294 | 294 | if start_date or end_date: |
|
295 | 295 | start_raw_id, end_raw_id = self._remote.lookup_interval( |
|
296 | 296 | date_astimestamp(start_date) if start_date else None, |
|
297 | 297 | date_astimestamp(end_date) if end_date else None) |
|
298 | 298 | start_pos = start_raw_id - 1 |
|
299 | 299 | end_pos = end_raw_id |
|
300 | 300 | |
|
301 | 301 | commit_ids = self.commit_ids |
|
302 | 302 | |
|
303 | 303 | # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here |
|
304 | 304 | if branch_name not in [None, self.DEFAULT_BRANCH_NAME]: |
|
305 | 305 | svn_rev = long(self.commit_ids[-1]) |
|
306 | 306 | commit_ids = self._remote.node_history( |
|
307 | 307 | path=branch_name, revision=svn_rev, limit=None) |
|
308 | 308 | commit_ids = [str(i) for i in reversed(commit_ids)] |
|
309 | 309 | |
|
310 | 310 | if start_pos or end_pos: |
|
311 | 311 | commit_ids = commit_ids[start_pos:end_pos] |
|
312 | 312 | return base.CollectionGenerator(self, commit_ids, pre_load=pre_load) |
|
313 | 313 | |
|
314 | 314 | def _sanitize_commit_id(self, commit_id): |
|
315 | 315 | if commit_id and commit_id.isdigit(): |
|
316 | 316 | if int(commit_id) <= len(self.commit_ids): |
|
317 | 317 | return commit_id |
|
318 | 318 | else: |
|
319 | 319 | raise CommitDoesNotExistError( |
|
320 | 320 | "Commit %s does not exist." % (commit_id, )) |
|
321 | 321 | if commit_id not in [ |
|
322 | 322 | None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]: |
|
323 | 323 | raise CommitDoesNotExistError( |
|
324 | 324 | "Commit id %s not understood." % (commit_id, )) |
|
325 | 325 | svn_rev = self._remote.lookup('HEAD') |
|
326 | 326 | return str(svn_rev) |
|
327 | 327 | |
|
328 | 328 | def get_diff( |
|
329 | 329 | self, commit1, commit2, path=None, ignore_whitespace=False, |
|
330 | 330 | context=3, path1=None): |
|
331 | 331 | self._validate_diff_commits(commit1, commit2) |
|
332 | 332 | svn_rev1 = long(commit1.raw_id) |
|
333 | 333 | svn_rev2 = long(commit2.raw_id) |
|
334 | 334 | diff = self._remote.diff( |
|
335 | 335 | svn_rev1, svn_rev2, path1=path1, path2=path, |
|
336 | 336 | ignore_whitespace=ignore_whitespace, context=context) |
|
337 | 337 | return SubversionDiff(diff) |
|
338 | 338 | |
|
339 | 339 | |
|
340 | 340 | def _sanitize_url(url): |
|
341 | 341 | if '://' not in url: |
|
342 | 342 | url = 'file://' + urllib.pathname2url(url) |
|
343 | 343 | return url |
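A quick check of the helper above, assuming this module is importable; the repository path is a made-up example (Python 2's urllib.pathname2url does the quoting):

    print(_sanitize_url('/srv/svn/myrepo'))              # file:///srv/svn/myrepo
    print(_sanitize_url('http://svn.example.com/repo'))  # unchanged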
@@ -1,833 +1,833 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Scm model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os.path |
|
26 | 26 | import traceback |
|
27 | 27 | import logging |
|
28 | 28 | import cStringIO |
|
29 | 29 | |
|
30 | 30 | from sqlalchemy import func |
|
31 | 31 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
32 | 32 | |
|
33 | 33 | import rhodecode |
|
34 | 34 | from rhodecode.lib.vcs import get_backend |
|
35 | 35 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
36 | 36 | from rhodecode.lib.vcs.nodes import FileNode |
|
37 | 37 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
38 | 38 | from rhodecode.lib import helpers as h, rc_cache |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
41 | 41 | HasUserGroupPermissionAny) |
|
42 | 42 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
43 | 43 | from rhodecode.lib import hooks_utils |
|
44 | 44 | from rhodecode.lib.utils import ( |
|
45 | 45 | get_filesystem_repos, make_db_config) |
|
46 | 46 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
47 | 47 | from rhodecode.lib.system_info import get_system_info |
|
48 | 48 | from rhodecode.model import BaseModel |
|
49 | 49 | from rhodecode.model.db import ( |
|
50 | 50 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
51 | 51 | PullRequest) |
|
52 | 52 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | 53 | from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class UserTemp(object): |
|
59 | 59 | def __init__(self, user_id): |
|
60 | 60 | self.user_id = user_id |
|
61 | 61 | |
|
62 | 62 | def __repr__(self): |
|
63 | 63 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | class RepoTemp(object): |
|
67 | 67 | def __init__(self, repo_id): |
|
68 | 68 | self.repo_id = repo_id |
|
69 | 69 | |
|
70 | 70 | def __repr__(self): |
|
71 | 71 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
72 | 72 | |
|
73 | 73 | |
|
74 | 74 | class SimpleCachedRepoList(object): |
|
75 | 75 | """ |
|
76 | 76 | Lighter version of iteration over repos, without the scm initialisation, |
|
77 | 77 | and with cache usage |
|
78 | 78 | """ |
|
79 | 79 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): |
|
80 | 80 | self.db_repo_list = db_repo_list |
|
81 | 81 | self.repos_path = repos_path |
|
82 | 82 | self.order_by = order_by |
|
83 | 83 | self.reversed = (order_by or '').startswith('-') |
|
84 | 84 | if not perm_set: |
|
85 | 85 | perm_set = ['repository.read', 'repository.write', |
|
86 | 86 | 'repository.admin'] |
|
87 | 87 | self.perm_set = perm_set |
|
88 | 88 | |
|
89 | 89 | def __len__(self): |
|
90 | 90 | return len(self.db_repo_list) |
|
91 | 91 | |
|
92 | 92 | def __repr__(self): |
|
93 | 93 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
94 | 94 | |
|
95 | 95 | def __iter__(self): |
|
96 | 96 | for dbr in self.db_repo_list: |
|
97 | 97 | # check permission at this level |
|
98 | 98 | has_perm = HasRepoPermissionAny(*self.perm_set)( |
|
99 | 99 | dbr.repo_name, 'SimpleCachedRepoList check') |
|
100 | 100 | if not has_perm: |
|
101 | 101 | continue |
|
102 | 102 | |
|
103 | 103 | tmp_d = { |
|
104 | 104 | 'name': dbr.repo_name, |
|
105 | 105 | 'dbrepo': dbr.get_dict(), |
|
106 | 106 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} |
|
107 | 107 | } |
|
108 | 108 | yield tmp_d |
|
109 | 109 | |
|
110 | 110 | |
|
111 | 111 | class _PermCheckIterator(object): |
|
112 | 112 | |
|
113 | 113 | def __init__( |
|
114 | 114 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
115 | 115 | extra_kwargs=None): |
|
116 | 116 | """ |
|
117 | 117 | Creates an iterator from the given list of objects, additionally |

118 | 118 | checking permissions for them against the perm_set |
|
119 | 119 | |
|
120 | 120 | :param obj_list: list of db objects |
|
121 | 121 | :param obj_attr: attribute of object to pass into perm_checker |
|
122 | 122 | :param perm_set: list of permissions to check |
|
123 | 123 | :param perm_checker: callable to check permissions against |
|
124 | 124 | """ |
|
125 | 125 | self.obj_list = obj_list |
|
126 | 126 | self.obj_attr = obj_attr |
|
127 | 127 | self.perm_set = perm_set |
|
128 | 128 | self.perm_checker = perm_checker |
|
129 | 129 | self.extra_kwargs = extra_kwargs or {} |
|
130 | 130 | |
|
131 | 131 | def __len__(self): |
|
132 | 132 | return len(self.obj_list) |
|
133 | 133 | |
|
134 | 134 | def __repr__(self): |
|
135 | 135 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
136 | 136 | |
|
137 | 137 | def __iter__(self): |
|
138 | 138 | checker = self.perm_checker(*self.perm_set) |
|
139 | 139 | for db_obj in self.obj_list: |
|
140 | 140 | # check permission at this level |
|
141 | 141 | name = getattr(db_obj, self.obj_attr, None) |
|
142 | 142 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
143 | 143 | continue |
|
144 | 144 | |
|
145 | 145 | yield db_obj |
|
146 | 146 | |
|
147 | 147 | |
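A hedged sketch of the intended use: wrap a plain DB query in one of the iterator subclasses defined next, and only objects the current user may access survive iteration (the check happens lazily in __iter__).

    # Sketch: list every repository the request user may at least read.
    # Repository.query() is the classmethod used elsewhere in this module.
    all_repos = Repository.query().all()
    readable = RepoList(all_repos)  # defaults to read/write/admin perm_set
    for db_repo in readable:
        print(db_repo.repo_name)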
|
148 | 148 | class RepoList(_PermCheckIterator): |
|
149 | 149 | |
|
150 | 150 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): |
|
151 | 151 | if not perm_set: |
|
152 | 152 | perm_set = [ |
|
153 | 153 | 'repository.read', 'repository.write', 'repository.admin'] |
|
154 | 154 | |
|
155 | 155 | super(RepoList, self).__init__( |
|
156 | 156 | obj_list=db_repo_list, |
|
157 | 157 | obj_attr='repo_name', perm_set=perm_set, |
|
158 | 158 | perm_checker=HasRepoPermissionAny, |
|
159 | 159 | extra_kwargs=extra_kwargs) |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | class RepoGroupList(_PermCheckIterator): |
|
163 | 163 | |
|
164 | 164 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): |
|
165 | 165 | if not perm_set: |
|
166 | 166 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
167 | 167 | |
|
168 | 168 | super(RepoGroupList, self).__init__( |
|
169 | 169 | obj_list=db_repo_group_list, |
|
170 | 170 | obj_attr='group_name', perm_set=perm_set, |
|
171 | 171 | perm_checker=HasRepoGroupPermissionAny, |
|
172 | 172 | extra_kwargs=extra_kwargs) |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | class UserGroupList(_PermCheckIterator): |
|
176 | 176 | |
|
177 | 177 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): |
|
178 | 178 | if not perm_set: |
|
179 | 179 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
180 | 180 | |
|
181 | 181 | super(UserGroupList, self).__init__( |
|
182 | 182 | obj_list=db_user_group_list, |
|
183 | 183 | obj_attr='users_group_name', perm_set=perm_set, |
|
184 | 184 | perm_checker=HasUserGroupPermissionAny, |
|
185 | 185 | extra_kwargs=extra_kwargs) |
|
186 | 186 | |
|
187 | 187 | |
|
188 | 188 | class ScmModel(BaseModel): |
|
189 | 189 | """ |
|
190 | 190 | Generic Scm Model |
|
191 | 191 | """ |
|
192 | 192 | |
|
193 | 193 | @LazyProperty |
|
194 | 194 | def repos_path(self): |
|
195 | 195 | """ |
|
196 | 196 | Gets the repositories root path from database |
|
197 | 197 | """ |
|
198 | 198 | |
|
199 | 199 | settings_model = VcsSettingsModel(sa=self.sa) |
|
200 | 200 | return settings_model.get_repos_location() |
|
201 | 201 | |
|
202 | 202 | def repo_scan(self, repos_path=None): |
|
203 | 203 | """ |
|
204 | 204 | Listing of repositories in the given path. This path should not be a |

205 | 205 | repository itself. Returns a dictionary of repository objects |
|
206 | 206 | |
|
207 | 207 | :param repos_path: path to directory containing repositories |
|
208 | 208 | """ |
|
209 | 209 | |
|
210 | 210 | if repos_path is None: |
|
211 | 211 | repos_path = self.repos_path |
|
212 | 212 | |
|
213 | 213 | log.info('scanning for repositories in %s', repos_path) |
|
214 | 214 | |
|
215 | 215 | config = make_db_config() |
|
216 | 216 | config.set('extensions', 'largefiles', '') |
|
217 | 217 | repos = {} |
|
218 | 218 | |
|
219 | 219 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
220 | 220 | # name needs to be decomposed and put back together using '/' |

221 | 221 | # since this is the internal storage separator for rhodecode |
|
222 | 222 | name = Repository.normalize_repo_name(name) |
|
223 | 223 | |
|
224 | 224 | try: |
|
225 | 225 | if name in repos: |
|
226 | 226 | raise RepositoryError('Duplicate repository name %s ' |
|
227 | 227 | 'found in %s' % (name, path)) |
|
228 | 228 | elif path[0] in rhodecode.BACKENDS: |
|
229 | 229 | klass = get_backend(path[0]) |
|
230 | 230 | repos[name] = klass(path[1], config=config) |
|
231 | 231 | except OSError: |
|
232 | 232 | continue |
|
233 | 233 | log.debug('found %s paths with repositories', len(repos)) |
|
234 | 234 | return repos |
|
235 | 235 | |
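A hedged usage sketch of the scan above; the alias attribute ('hg', 'git', 'svn') is how backend instances identify themselves elsewhere in this file.

    # Sketch: scan the configured repository root and report what was found.
    repos = ScmModel().repo_scan()
    for name in sorted(repos):
        print('%s (%s)' % (name, repos[name].alias))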
|
236 | 236 | def get_repos(self, all_repos=None, sort_key=None): |
|
237 | 237 | """ |
|
238 | 238 | Get all repositories from db and for each repo create its |

239 | 239 | backend instance and fill that backend with information from database |
|
240 | 240 | |
|
241 | 241 | :param all_repos: list of repository names as strings; |

242 | 242 | pass a specific list of repositories, useful for filtering |
|
243 | 243 | |
|
244 | 244 | :param sort_key: initial sorting of repositories |
|
245 | 245 | """ |
|
246 | 246 | if all_repos is None: |
|
247 | 247 | all_repos = self.sa.query(Repository)\ |
|
248 | 248 | .filter(Repository.group_id == None)\ |
|
249 | 249 | .order_by(func.lower(Repository.repo_name)).all() |
|
250 | 250 | repo_iter = SimpleCachedRepoList( |
|
251 | 251 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
252 | 252 | return repo_iter |
|
253 | 253 | |
|
254 | 254 | def get_repo_groups(self, all_groups=None): |
|
255 | 255 | if all_groups is None: |
|
256 | 256 | all_groups = RepoGroup.query()\ |
|
257 | 257 | .filter(RepoGroup.group_parent_id == None).all() |
|
258 | 258 | return [x for x in RepoGroupList(all_groups)] |
|
259 | 259 | |
|
260 | 260 | def mark_for_invalidation(self, repo_name, delete=False): |
|
261 | 261 | """ |
|
262 | 262 | Mark caches of this repo invalid in the database. `delete` flag |
|
263 | 263 | removes the cache entries |
|
264 | 264 | |
|
265 | 265 | :param repo_name: the repo_name for which caches should be marked |
|
266 | 266 | invalid, or deleted |
|
267 | 267 | :param delete: delete the entry keys instead of setting bool |
|
268 | 268 | flag on them, and also purge caches used by the dogpile |
|
269 | 269 | """ |
|
270 | 270 | repo = Repository.get_by_repo_name(repo_name) |
|
271 | 271 | |
|
272 | 272 | if repo: |
|
273 | 273 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( |
|
274 | 274 | repo_id=repo.repo_id) |
|
275 | 275 | CacheKey.set_invalidate(invalidation_namespace, delete=delete) |
|
276 | 276 | |
|
277 | 277 | repo_id = repo.repo_id |
|
278 | 278 | config = repo._config |
|
279 | 279 | config.set('extensions', 'largefiles', '') |
|
280 | 280 | repo.update_commit_cache(config=config, cs_cache=None) |
|
281 | 281 | if delete: |
|
282 | 282 | cache_namespace_uid = 'cache_repo.{}'.format(repo_id) |
|
283 | 283 | rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid) |
|
284 | 284 | |
|
285 | 285 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
286 | 286 | |
|
287 | 287 | f = self.sa.query(UserFollowing)\ |
|
288 | 288 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
289 | 289 | .filter(UserFollowing.user_id == user_id).scalar() |
|
290 | 290 | |
|
291 | 291 | if f is not None: |
|
292 | 292 | try: |
|
293 | 293 | self.sa.delete(f) |
|
294 | 294 | return |
|
295 | 295 | except Exception: |
|
296 | 296 | log.error(traceback.format_exc()) |
|
297 | 297 | raise |
|
298 | 298 | |
|
299 | 299 | try: |
|
300 | 300 | f = UserFollowing() |
|
301 | 301 | f.user_id = user_id |
|
302 | 302 | f.follows_repo_id = follow_repo_id |
|
303 | 303 | self.sa.add(f) |
|
304 | 304 | except Exception: |
|
305 | 305 | log.error(traceback.format_exc()) |
|
306 | 306 | raise |
|
307 | 307 | |
|
308 | 308 | def toggle_following_user(self, follow_user_id, user_id): |
|
309 | 309 | f = self.sa.query(UserFollowing)\ |
|
310 | 310 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
311 | 311 | .filter(UserFollowing.user_id == user_id).scalar() |
|
312 | 312 | |
|
313 | 313 | if f is not None: |
|
314 | 314 | try: |
|
315 | 315 | self.sa.delete(f) |
|
316 | 316 | return |
|
317 | 317 | except Exception: |
|
318 | 318 | log.error(traceback.format_exc()) |
|
319 | 319 | raise |
|
320 | 320 | |
|
321 | 321 | try: |
|
322 | 322 | f = UserFollowing() |
|
323 | 323 | f.user_id = user_id |
|
324 | 324 | f.follows_user_id = follow_user_id |
|
325 | 325 | self.sa.add(f) |
|
326 | 326 | except Exception: |
|
327 | 327 | log.error(traceback.format_exc()) |
|
328 | 328 | raise |
|
329 | 329 | |
|
330 | 330 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
331 | 331 | r = self.sa.query(Repository)\ |
|
332 | 332 | .filter(Repository.repo_name == repo_name).scalar() |
|
333 | 333 | |
|
334 | 334 | f = self.sa.query(UserFollowing)\ |
|
335 | 335 | .filter(UserFollowing.follows_repository == r)\ |
|
336 | 336 | .filter(UserFollowing.user_id == user_id).scalar() |
|
337 | 337 | |
|
338 | 338 | return f is not None |
|
339 | 339 | |
|
340 | 340 | def is_following_user(self, username, user_id, cache=False): |
|
341 | 341 | u = User.get_by_username(username) |
|
342 | 342 | |
|
343 | 343 | f = self.sa.query(UserFollowing)\ |
|
344 | 344 | .filter(UserFollowing.follows_user == u)\ |
|
345 | 345 | .filter(UserFollowing.user_id == user_id).scalar() |
|
346 | 346 | |
|
347 | 347 | return f is not None |
|
348 | 348 | |
|
349 | 349 | def get_followers(self, repo): |
|
350 | 350 | repo = self._get_repo(repo) |
|
351 | 351 | |
|
352 | 352 | return self.sa.query(UserFollowing)\ |
|
353 | 353 | .filter(UserFollowing.follows_repository == repo).count() |
|
354 | 354 | |
|
355 | 355 | def get_forks(self, repo): |
|
356 | 356 | repo = self._get_repo(repo) |
|
357 | 357 | return self.sa.query(Repository)\ |
|
358 | 358 | .filter(Repository.fork == repo).count() |
|
359 | 359 | |
|
360 | 360 | def get_pull_requests(self, repo): |
|
361 | 361 | repo = self._get_repo(repo) |
|
362 | 362 | return self.sa.query(PullRequest)\ |
|
363 | 363 | .filter(PullRequest.target_repo == repo)\ |
|
364 | 364 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
365 | 365 | |
|
366 | 366 | def mark_as_fork(self, repo, fork, user): |
|
367 | 367 | repo = self._get_repo(repo) |
|
368 | 368 | fork = self._get_repo(fork) |
|
369 | 369 | if fork and repo.repo_id == fork.repo_id: |
|
370 | 370 | raise Exception("Cannot set repository as fork of itself") |
|
371 | 371 | |
|
372 | 372 | if fork and repo.repo_type != fork.repo_type: |
|
373 | 373 | raise RepositoryError( |
|
374 | 374 | "Cannot set repository as fork of repository with other type") |
|
375 | 375 | |
|
376 | 376 | repo.fork = fork |
|
377 | 377 | self.sa.add(repo) |
|
378 | 378 | return repo |
|
379 | 379 | |
|
380 | 380 | def pull_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
381 | 381 | dbrepo = self._get_repo(repo) |
|
382 | 382 | remote_uri = remote_uri or dbrepo.clone_uri |
|
383 | 383 | if not remote_uri: |
|
384 | 384 | raise Exception("This repository doesn't have a clone uri") |
|
385 | 385 | |
|
386 | 386 | repo = dbrepo.scm_instance(cache=False) |
|
387 | 387 | repo.config.clear_section('hooks') |
|
388 | 388 | |
|
389 | 389 | try: |
|
390 | 390 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
391 | 391 | # this is because these tasks can be executed via the scheduler without |
|
392 | 392 | # proper validation of remote_uri |
|
393 | 393 | if validate_uri: |
|
394 | 394 | config = make_db_config(clear_session=False) |
|
395 | 395 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
396 | 396 | except InvalidCloneUrl: |
|
397 | 397 | raise |
|
398 | 398 | |
|
399 | 399 | repo_name = dbrepo.repo_name |
|
400 | 400 | try: |
|
401 | 401 | # TODO: we need to make sure those operations call proper hooks ! |
|
402 | repo. |

402 | repo.fetch(remote_uri) |
|
403 | 403 | |
|
404 | 404 | self.mark_for_invalidation(repo_name) |
|
405 | 405 | except Exception: |
|
406 | 406 | log.error(traceback.format_exc()) |
|
407 | 407 | raise |
|
408 | 408 | |
|
409 | 409 | def push_changes(self, repo, username, remote_uri=None, validate_uri=True): |
|
410 | 410 | dbrepo = self._get_repo(repo) |
|
411 | 411 | remote_uri = remote_uri or dbrepo.push_uri |
|
412 | 412 | if not remote_uri: |
|
413 | 413 | raise Exception("This repository doesn't have a clone uri") |
|
414 | 414 | |
|
415 | 415 | repo = dbrepo.scm_instance(cache=False) |
|
416 | 416 | repo.config.clear_section('hooks') |
|
417 | 417 | |
|
418 | 418 | try: |
|
419 | 419 | # NOTE(marcink): add extra validation so we skip invalid urls |
|
420 | 420 | # this is because these tasks can be executed via the scheduler without |
|
421 | 421 | # proper validation of remote_uri |
|
422 | 422 | if validate_uri: |
|
423 | 423 | config = make_db_config(clear_session=False) |
|
424 | 424 | url_validator(remote_uri, dbrepo.repo_type, config) |
|
425 | 425 | except InvalidCloneUrl: |
|
426 | 426 | raise |
|
427 | 427 | |
|
428 | 428 | try: |
|
429 | 429 | repo.push(remote_uri) |
|
430 | 430 | except Exception: |
|
431 | 431 | log.error(traceback.format_exc()) |
|
432 | 432 | raise |
|
433 | 433 | |
|
434 | 434 | def commit_change(self, repo, repo_name, commit, user, author, message, |
|
435 | 435 | content, f_path): |
|
436 | 436 | """ |
|
437 | 437 | Commits changes |
|
438 | 438 | |
|
439 | 439 | :param repo: SCM instance |
|
440 | 440 | |
|
441 | 441 | """ |
|
442 | 442 | user = self._get_user(user) |
|
443 | 443 | |
|
444 | 444 | # decoding here ensures we have properly encoded values; |

445 | 445 | # in any other case this will throw exceptions and deny the commit |
|
446 | 446 | content = safe_str(content) |
|
447 | 447 | path = safe_str(f_path) |
|
448 | 448 | # message and author need to be unicode |

449 | 449 | # the proper backend should then translate that into the required type |
|
450 | 450 | message = safe_unicode(message) |
|
451 | 451 | author = safe_unicode(author) |
|
452 | 452 | imc = repo.in_memory_commit |
|
453 | 453 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) |
|
454 | 454 | try: |
|
455 | 455 | # TODO: handle pre-push action ! |
|
456 | 456 | tip = imc.commit( |
|
457 | 457 | message=message, author=author, parents=[commit], |
|
458 | 458 | branch=commit.branch) |
|
459 | 459 | except Exception as e: |
|
460 | 460 | log.error(traceback.format_exc()) |
|
461 | 461 | raise IMCCommitError(str(e)) |
|
462 | 462 | finally: |
|
463 | 463 | # always clear caches; if the commit fails we still want a fresh object |
|
464 | 464 | self.mark_for_invalidation(repo_name) |
|
465 | 465 | |
|
466 | 466 | # We trigger the post-push action |
|
467 | 467 | hooks_utils.trigger_post_push_hook( |
|
468 | 468 | username=user.username, action='push_local', repo_name=repo_name, |
|
469 | 469 | repo_alias=repo.alias, commit_ids=[tip.raw_id]) |
|
470 | 470 | return tip |
|
471 | 471 | |
|
472 | 472 | def _sanitize_path(self, f_path): |
|
473 | 473 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: |
|
474 | 474 | raise NonRelativePathError('%s is not a relative path' % f_path) |
|
475 | 475 | if f_path: |
|
476 | 476 | f_path = os.path.normpath(f_path) |
|
477 | 477 | return f_path |
|
478 | 478 | |
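The guard above rejects absolute paths and parent-directory escapes while normalising the rest; a short sketch of both outcomes (the paths are made up):

    model = ScmModel()
    print(model._sanitize_path('docs/./readme.rst'))  # -> docs/readme.rst
    try:
        model._sanitize_path('../../etc/passwd')      # traversal attempt
    except NonRelativePathError as e:
        print(e)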
|
479 | 479 | def get_dirnode_metadata(self, request, commit, dir_node): |
|
480 | 480 | if not dir_node.is_dir(): |
|
481 | 481 | return [] |
|
482 | 482 | |
|
483 | 483 | data = [] |
|
484 | 484 | for node in dir_node: |
|
485 | 485 | if not node.is_file(): |
|
486 | 486 | # we skip file-nodes |
|
487 | 487 | continue |
|
488 | 488 | |
|
489 | 489 | last_commit = node.last_commit |
|
490 | 490 | last_commit_date = last_commit.date |
|
491 | 491 | data.append({ |
|
492 | 492 | 'name': node.name, |
|
493 | 493 | 'size': h.format_byte_size_binary(node.size), |
|
494 | 494 | 'modified_at': h.format_date(last_commit_date), |
|
495 | 495 | 'modified_ts': last_commit_date.isoformat(), |
|
496 | 496 | 'revision': last_commit.revision, |
|
497 | 497 | 'short_id': last_commit.short_id, |
|
498 | 498 | 'message': h.escape(last_commit.message), |
|
499 | 499 | 'author': h.escape(last_commit.author), |
|
500 | 500 | 'user_profile': h.gravatar_with_user( |
|
501 | 501 | request, last_commit.author), |
|
502 | 502 | }) |
|
503 | 503 | |
|
504 | 504 | return data |
|
505 | 505 | |
|
506 | 506 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, |
|
507 | 507 | extended_info=False, content=False, max_file_bytes=None): |
|
508 | 508 | """ |
|
509 | 509 | recursively walk the root dir and return a set of all paths in that dir, |

510 | 510 | based on the repository walk function |
|
511 | 511 | |
|
512 | 512 | :param repo_name: name of repository |
|
513 | 513 | :param commit_id: commit id for which to list nodes |
|
514 | 514 | :param root_path: root path to list |
|
515 | 515 | :param flat: return as a list, if False returns a dict with description |
|
516 | 516 | :param max_file_bytes: will not return file contents over this limit |
|
517 | 517 | |
|
518 | 518 | """ |
|
519 | 519 | _files = list() |
|
520 | 520 | _dirs = list() |
|
521 | 521 | try: |
|
522 | 522 | _repo = self._get_repo(repo_name) |
|
523 | 523 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
524 | 524 | root_path = root_path.lstrip('/') |
|
525 | 525 | for __, dirs, files in commit.walk(root_path): |
|
526 | 526 | for f in files: |
|
527 | 527 | _content = None |
|
528 | 528 | _data = f.unicode_path |
|
529 | 529 | over_size_limit = (max_file_bytes is not None |
|
530 | 530 | and f.size > max_file_bytes) |
|
531 | 531 | |
|
532 | 532 | if not flat: |
|
533 | 533 | _data = { |
|
534 | 534 | "name": h.escape(f.unicode_path), |
|
535 | 535 | "type": "file", |
|
536 | 536 | } |
|
537 | 537 | if extended_info: |
|
538 | 538 | _data.update({ |
|
539 | 539 | "md5": f.md5, |
|
540 | 540 | "binary": f.is_binary, |
|
541 | 541 | "size": f.size, |
|
542 | 542 | "extension": f.extension, |
|
543 | 543 | "mimetype": f.mimetype, |
|
544 | 544 | "lines": f.lines()[0] |
|
545 | 545 | }) |
|
546 | 546 | |
|
547 | 547 | if content: |
|
548 | 548 | full_content = None |
|
549 | 549 | if not f.is_binary and not over_size_limit: |
|
550 | 550 | full_content = safe_str(f.content) |
|
551 | 551 | |
|
552 | 552 | _data.update({ |
|
553 | 553 | "content": full_content, |
|
554 | 554 | }) |
|
555 | 555 | _files.append(_data) |
|
556 | 556 | for d in dirs: |
|
557 | 557 | _data = d.unicode_path |
|
558 | 558 | if not flat: |
|
559 | 559 | _data = { |
|
560 | 560 | "name": h.escape(d.unicode_path), |
|
561 | 561 | "type": "dir", |
|
562 | 562 | } |
|
563 | 563 | if extended_info: |
|
564 | 564 | _data.update({ |
|
565 | 565 | "md5": None, |
|
566 | 566 | "binary": None, |
|
567 | 567 | "size": None, |
|
568 | 568 | "extension": None, |
|
569 | 569 | }) |
|
570 | 570 | if content: |
|
571 | 571 | _data.update({ |
|
572 | 572 | "content": None |
|
573 | 573 | }) |
|
574 | 574 | _dirs.append(_data) |
|
575 | 575 | except RepositoryError: |
|
576 | 576 | log.debug("Exception in get_nodes", exc_info=True) |
|
577 | 577 | raise |
|
578 | 578 | |
|
579 | 579 | return _dirs, _files |
|
580 | 580 | |
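A hedged call sketch; the repository name and commit id are placeholders. With flat=False each entry is a dict, and file content is withheld for binaries and for anything above max_file_bytes:

    dirs, files = ScmModel().get_nodes(
        'myrepo', commit_id='tip', root_path='docs', flat=False,
        extended_info=True, content=True, max_file_bytes=1024 * 1024)
    for f in files:
        print('%(name)s %(size)s %(mimetype)s' % f)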
|
581 | 581 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
582 | 582 | author=None, trigger_push_hook=True): |
|
583 | 583 | """ |
|
584 | 584 | Commits given multiple nodes into repo |
|
585 | 585 | |
|
586 | 586 | :param user: RhodeCode User object or user_id, the committer |
|
587 | 587 | :param repo: RhodeCode Repository object |
|
588 | 588 | :param message: commit message |
|
589 | 589 | :param nodes: mapping {filename:{'content':content},...} |
|
590 | 590 | :param parent_commit: parent commit; if empty, this is the |

591 | 591 | initial commit |

592 | 592 | :param author: author of the commit; can differ from the committer, |

593 | 593 | but only for git |
|
594 | 594 | :param trigger_push_hook: trigger push hooks |
|
595 | 595 | |
|
596 | 596 | :returns: new committed commit |
|
597 | 597 | """ |
|
598 | 598 | |
|
599 | 599 | user = self._get_user(user) |
|
600 | 600 | scm_instance = repo.scm_instance(cache=False) |
|
601 | 601 | |
|
602 | 602 | processed_nodes = [] |
|
603 | 603 | for f_path in nodes: |
|
604 | 604 | f_path = self._sanitize_path(f_path) |
|
605 | 605 | content = nodes[f_path]['content'] |
|
606 | 606 | f_path = safe_str(f_path) |
|
607 | 607 | # decoding here will force that we have proper encoded values |
|
608 | 608 | # in any other case this will throw exceptions and deny commit |
|
609 | 609 | if isinstance(content, (basestring,)): |
|
610 | 610 | content = safe_str(content) |
|
611 | 611 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
612 | 612 | content = content.read() |
|
613 | 613 | else: |
|
614 | 614 | raise Exception('Content is of unrecognized type %s' % ( |
|
615 | 615 | type(content) |
|
616 | 616 | )) |
|
617 | 617 | processed_nodes.append((f_path, content)) |
|
618 | 618 | |
|
619 | 619 | message = safe_unicode(message) |
|
620 | 620 | commiter = user.full_contact |
|
621 | 621 | author = safe_unicode(author) if author else commiter |
|
622 | 622 | |
|
623 | 623 | imc = scm_instance.in_memory_commit |
|
624 | 624 | |
|
625 | 625 | if not parent_commit: |
|
626 | 626 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
627 | 627 | |
|
628 | 628 | if isinstance(parent_commit, EmptyCommit): |
|
629 | 629 | # EmptyCommit means we're editing an empty repository |
|
630 | 630 | parents = None |
|
631 | 631 | else: |
|
632 | 632 | parents = [parent_commit] |
|
633 | 633 | # add multiple nodes |
|
634 | 634 | for path, content in processed_nodes: |
|
635 | 635 | imc.add(FileNode(path, content=content)) |
|
636 | 636 | # TODO: handle pre push scenario |
|
637 | 637 | tip = imc.commit(message=message, |
|
638 | 638 | author=author, |
|
639 | 639 | parents=parents, |
|
640 | 640 | branch=parent_commit.branch) |
|
641 | 641 | |
|
642 | 642 | self.mark_for_invalidation(repo.repo_name) |
|
643 | 643 | if trigger_push_hook: |
|
644 | 644 | hooks_utils.trigger_post_push_hook( |
|
645 | 645 | username=user.username, action='push_local', |
|
646 | 646 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
647 | 647 | commit_ids=[tip.raw_id]) |
|
648 | 648 | return tip |
|
649 | 649 | |
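The nodes mapping the docstring describes, spelled out; db_user and db_repo are placeholders for whatever User and Repository objects the caller holds:

    nodes = {
        'README.rst': {'content': 'Hello RhodeCode'},
        'docs/index.rst': {'content': 'Contents\n========\n'},
    }
    tip = ScmModel().create_nodes(
        user=db_user, repo=db_repo, message=u'Add initial docs',
        nodes=nodes)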
|
650 | 650 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
651 | 651 | author=None, trigger_push_hook=True): |
|
652 | 652 | user = self._get_user(user) |
|
653 | 653 | scm_instance = repo.scm_instance(cache=False) |
|
654 | 654 | |
|
655 | 655 | message = safe_unicode(message) |
|
656 | 656 | commiter = user.full_contact |
|
657 | 657 | author = safe_unicode(author) if author else commiter |
|
658 | 658 | |
|
659 | 659 | imc = scm_instance.in_memory_commit |
|
660 | 660 | |
|
661 | 661 | if not parent_commit: |
|
662 | 662 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
663 | 663 | |
|
664 | 664 | if isinstance(parent_commit, EmptyCommit): |
|
665 | 665 | # EmptyCommit means we're editing an empty repository |
|
666 | 666 | parents = None |
|
667 | 667 | else: |
|
668 | 668 | parents = [parent_commit] |
|
669 | 669 | |
|
670 | 670 | # add multiple nodes |
|
671 | 671 | for _filename, data in nodes.items(): |
|
672 | 672 | # new filename, possibly renamed from the old one; also sanitize |

673 | 673 | # the path against relative-path tricks like ../../ etc. |
|
674 | 674 | filename = self._sanitize_path(data['filename']) |
|
675 | 675 | old_filename = self._sanitize_path(_filename) |
|
676 | 676 | content = data['content'] |
|
677 | 677 | |
|
678 | 678 | filenode = FileNode(old_filename, content=content) |
|
679 | 679 | op = data['op'] |
|
680 | 680 | if op == 'add': |
|
681 | 681 | imc.add(filenode) |
|
682 | 682 | elif op == 'del': |
|
683 | 683 | imc.remove(filenode) |
|
684 | 684 | elif op == 'mod': |
|
685 | 685 | if filename != old_filename: |
|
686 | 686 | # TODO: handle renames more efficiently, needs vcs lib |
|
687 | 687 | # changes |
|
688 | 688 | imc.remove(filenode) |
|
689 | 689 | imc.add(FileNode(filename, content=content)) |
|
690 | 690 | else: |
|
691 | 691 | imc.change(filenode) |
|
692 | 692 | |
|
693 | 693 | try: |
|
694 | 694 | # TODO: handle pre push scenario |
|
695 | 695 | # commit changes |
|
696 | 696 | tip = imc.commit(message=message, |
|
697 | 697 | author=author, |
|
698 | 698 | parents=parents, |
|
699 | 699 | branch=parent_commit.branch) |
|
700 | 700 | except NodeNotChangedError: |
|
701 | 701 | raise |
|
702 | 702 | except Exception as e: |
|
703 | 703 | log.exception("Unexpected exception during call to imc.commit") |
|
704 | 704 | raise IMCCommitError(str(e)) |
|
705 | 705 | finally: |
|
706 | 706 | # always clear caches; if the commit fails we still want a fresh object |
|
707 | 707 | self.mark_for_invalidation(repo.repo_name) |
|
708 | 708 | |
|
709 | 709 | if trigger_push_hook: |
|
710 | 710 | hooks_utils.trigger_post_push_hook( |
|
711 | 711 | username=user.username, action='push_local', |
|
712 | 712 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
713 | 713 | commit_ids=[tip.raw_id]) |
|
714 | 714 | |
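The shape of the nodes mapping consumed above, one entry per op; note that a rename rides on 'mod' by giving a 'filename' that differs from the dict key (the old name). db_user, db_repo and NEW_SETUP_PY are placeholders:

    nodes = {
        # plain edit: key and 'filename' match
        'setup.py': {'filename': 'setup.py',
                     'content': NEW_SETUP_PY, 'op': 'mod'},
        # rename plus edit: key is the old name, 'filename' the new one
        'old.txt': {'filename': 'new.txt',
                    'content': 'moved text', 'op': 'mod'},
        # removal
        'obsolete.txt': {'filename': 'obsolete.txt',
                         'content': '', 'op': 'del'},
    }
    ScmModel().update_nodes(
        user=db_user, repo=db_repo, message=u'Housekeeping', nodes=nodes)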
|
715 | 715 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
716 | 716 | author=None, trigger_push_hook=True): |
|
717 | 717 | """ |
|
718 | 718 | Deletes given multiple nodes from `repo` |
|
719 | 719 | |
|
720 | 720 | :param user: RhodeCode User object or user_id, the committer |
|
721 | 721 | :param repo: RhodeCode Repository object |
|
722 | 722 | :param message: commit message |
|
723 | 723 | :param nodes: mapping {filename:{'content':content},...} |
|
724 | 724 | :param parent_commit: parent commit; if empty, this is the initial |

725 | 725 | commit |

726 | 726 | :param author: author of the commit; can differ from the committer, |

727 | 727 | but only for git |
|
728 | 728 | :param trigger_push_hook: trigger push hooks |
|
729 | 729 | |
|
730 | 730 | :returns: new commit after deletion |
|
731 | 731 | """ |
|
732 | 732 | |
|
733 | 733 | user = self._get_user(user) |
|
734 | 734 | scm_instance = repo.scm_instance(cache=False) |
|
735 | 735 | |
|
736 | 736 | processed_nodes = [] |
|
737 | 737 | for f_path in nodes: |
|
738 | 738 | f_path = self._sanitize_path(f_path) |
|
739 | 739 | # content can be empty, but for compatibility it allows the same dict |

740 | 740 | # structure as add_nodes |
|
741 | 741 | content = nodes[f_path].get('content') |
|
742 | 742 | processed_nodes.append((f_path, content)) |
|
743 | 743 | |
|
744 | 744 | message = safe_unicode(message) |
|
745 | 745 | commiter = user.full_contact |
|
746 | 746 | author = safe_unicode(author) if author else commiter |
|
747 | 747 | |
|
748 | 748 | imc = scm_instance.in_memory_commit |
|
749 | 749 | |
|
750 | 750 | if not parent_commit: |
|
751 | 751 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
752 | 752 | |
|
753 | 753 | if isinstance(parent_commit, EmptyCommit): |
|
754 | 754 | # EmptyCommit means we're editing an empty repository |
|
755 | 755 | parents = None |
|
756 | 756 | else: |
|
757 | 757 | parents = [parent_commit] |
|
758 | 758 | # add multiple nodes |
|
759 | 759 | for path, content in processed_nodes: |
|
760 | 760 | imc.remove(FileNode(path, content=content)) |
|
761 | 761 | |
|
762 | 762 | # TODO: handle pre push scenario |
|
763 | 763 | tip = imc.commit(message=message, |
|
764 | 764 | author=author, |
|
765 | 765 | parents=parents, |
|
766 | 766 | branch=parent_commit.branch) |
|
767 | 767 | |
|
768 | 768 | self.mark_for_invalidation(repo.repo_name) |
|
769 | 769 | if trigger_push_hook: |
|
770 | 770 | hooks_utils.trigger_post_push_hook( |
|
771 | 771 | username=user.username, action='push_local', |
|
772 | 772 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
773 | 773 | commit_ids=[tip.raw_id]) |
|
774 | 774 | return tip |
|
775 | 775 | |
|
776 | 776 | def strip(self, repo, commit_id, branch): |
|
777 | 777 | scm_instance = repo.scm_instance(cache=False) |
|
778 | 778 | scm_instance.config.clear_section('hooks') |
|
779 | 779 | scm_instance.strip(commit_id, branch) |
|
780 | 780 | self.mark_for_invalidation(repo.repo_name) |
|
781 | 781 | |
|
782 | 782 | def get_unread_journal(self): |
|
783 | 783 | return self.sa.query(UserLog).count() |
|
784 | 784 | |
|
785 | 785 | def get_repo_landing_revs(self, translator, repo=None): |
|
786 | 786 | """ |
|
787 | 787 | Generates select options with tags, branches and bookmarks (hg only), |

788 | 788 | grouped by type |
|
789 | 789 | |
|
790 | 790 | :param repo: |
|
791 | 791 | """ |
|
792 | 792 | _ = translator |
|
793 | 793 | repo = self._get_repo(repo) |
|
794 | 794 | |
|
795 | 795 | hist_l = [ |
|
796 | 796 | ['rev:tip', _('latest tip')] |
|
797 | 797 | ] |
|
798 | 798 | choices = [ |
|
799 | 799 | 'rev:tip' |
|
800 | 800 | ] |
|
801 | 801 | |
|
802 | 802 | if not repo: |
|
803 | 803 | return choices, hist_l |
|
804 | 804 | |
|
805 | 805 | repo = repo.scm_instance() |
|
806 | 806 | |
|
807 | 807 | branches_group = ( |
|
808 | 808 | [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) |
|
809 | 809 | for b in repo.branches], |
|
810 | 810 | _("Branches")) |
|
811 | 811 | hist_l.append(branches_group) |
|
812 | 812 | choices.extend([x[0] for x in branches_group[0]]) |
|
813 | 813 | |
|
814 | 814 | if repo.alias == 'hg': |
|
815 | 815 | bookmarks_group = ( |
|
816 | 816 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) |
|
817 | 817 | for b in repo.bookmarks], |
|
818 | 818 | _("Bookmarks")) |
|
819 | 819 | hist_l.append(bookmarks_group) |
|
820 | 820 | choices.extend([x[0] for x in bookmarks_group[0]]) |
|
821 | 821 | |
|
822 | 822 | tags_group = ( |
|
823 | 823 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) |
|
824 | 824 | for t in repo.tags], |
|
825 | 825 | _("Tags")) |
|
826 | 826 | hist_l.append(tags_group) |
|
827 | 827 | choices.extend([x[0] for x in tags_group[0]]) |
|
828 | 828 | |
|
829 | 829 | return choices, hist_l |
|
830 | 830 | |
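For a Mercurial repository with one branch `default`, a bookmark `work` and a tag `v1.0`, the two return values would look roughly like this (a sketch derived from the code above)::

    choices = ['rev:tip', u'branch:default', u'book:work', u'tag:v1.0']
    hist_l = [
        ['rev:tip', 'latest tip'],
        ([(u'branch:default', u'default')], 'Branches'),
        ([(u'book:work', u'work')], 'Bookmarks'),
        ([(u'tag:v1.0', u'v1.0')], 'Tags'),
    ]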
|
831 | 831 | def get_server_info(self, environ=None): |
|
832 | 832 | server_info = get_system_info(environ) |
|
833 | 833 | return server_info |
@@ -1,1865 +1,1867 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import datetime |
|
23 | 23 | import hashlib |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import pprint |
|
27 | 27 | import shutil |
|
28 | 28 | import socket |
|
29 | 29 | import subprocess32 |
|
30 | 30 | import time |
|
31 | 31 | import uuid |
|
32 | 32 | import dateutil.tz |
|
33 | 33 | import functools |
|
34 | 34 | |
|
35 | 35 | import mock |
|
36 | 36 | import pyramid.testing |
|
37 | 37 | import pytest |
|
38 | 38 | import colander |
|
39 | 39 | import requests |
|
40 | 40 | import pyramid.paster |
|
41 | 41 | |
|
42 | 42 | import rhodecode |
|
43 | 43 | from rhodecode.lib.utils2 import AttributeDict |
|
44 | 44 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
45 | 45 | from rhodecode.model.comment import CommentsModel |
|
46 | 46 | from rhodecode.model.db import ( |
|
47 | 47 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
48 | 48 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) |
|
49 | 49 | from rhodecode.model.meta import Session |
|
50 | 50 | from rhodecode.model.pull_request import PullRequestModel |
|
51 | 51 | from rhodecode.model.repo import RepoModel |
|
52 | 52 | from rhodecode.model.repo_group import RepoGroupModel |
|
53 | 53 | from rhodecode.model.user import UserModel |
|
54 | 54 | from rhodecode.model.settings import VcsSettingsModel |
|
55 | 55 | from rhodecode.model.user_group import UserGroupModel |
|
56 | 56 | from rhodecode.model.integration import IntegrationModel |
|
57 | 57 | from rhodecode.integrations import integration_type_registry |
|
58 | 58 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
59 | 59 | from rhodecode.lib.utils import repo2db_mapper |
|
60 | 60 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
61 | 61 | from rhodecode.lib.vcs.backends import get_backend |
|
62 | 62 | from rhodecode.lib.vcs.nodes import FileNode |
|
63 | 63 | from rhodecode.tests import ( |
|
64 | 64 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
65 | 65 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
66 | 66 | TEST_USER_REGULAR_PASS) |
|
67 | 67 | from rhodecode.tests.utils import CustomTestApp, set_anonymous_access |
|
68 | 68 | from rhodecode.tests.fixture import Fixture |
|
69 | 69 | from rhodecode.config import utils as config_utils |
|
70 | 70 | |
|
71 | 71 | def _split_comma(value): |
|
72 | 72 | return value.split(',') |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def pytest_addoption(parser): |
|
76 | 76 | parser.addoption( |
|
77 | 77 | '--keep-tmp-path', action='store_true', |
|
78 | 78 | help="Keep the test temporary directories") |
|
79 | 79 | parser.addoption( |
|
80 | 80 | '--backends', action='store', type=_split_comma, |
|
81 | 81 | default=['git', 'hg', 'svn'], |
|
82 | 82 | help="Select which backends to test for backend specific tests.") |
|
83 | 83 | parser.addoption( |
|
84 | 84 | '--dbs', action='store', type=_split_comma, |
|
85 | 85 | default=['sqlite'], |
|
86 | 86 | help="Select which database to test for database specific tests. " |
|
87 | 87 | "Possible options are sqlite,postgres,mysql") |
|
88 | 88 | parser.addoption( |
|
89 | 89 | '--appenlight', '--ae', action='store_true', |
|
90 | 90 | help="Track statistics in appenlight.") |
|
91 | 91 | parser.addoption( |
|
92 | 92 | '--appenlight-api-key', '--ae-key', |
|
93 | 93 | help="API key for Appenlight.") |
|
94 | 94 | parser.addoption( |
|
95 | 95 | '--appenlight-url', '--ae-url', |
|
96 | 96 | default="https://ae.rhodecode.com", |
|
97 | 97 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
98 | 98 | parser.addoption( |
|
99 | 99 | '--sqlite-connection-string', action='store', |
|
100 | 100 | default='', help="Connection string for the dbs tests with SQLite") |
|
101 | 101 | parser.addoption( |
|
102 | 102 | '--postgres-connection-string', action='store', |
|
103 | 103 | default='', help="Connection string for the dbs tests with Postgres") |
|
104 | 104 | parser.addoption( |
|
105 | 105 | '--mysql-connection-string', action='store', |
|
106 | 106 | default='', help="Connection string for the dbs tests with MySQL") |
|
107 | 107 | parser.addoption( |
|
108 | 108 | '--repeat', type=int, default=100, |
|
109 | 109 | help="Number of repetitions in performance tests.") |
|
110 | 110 | |
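An illustrative invocation combining these options (the paths and connection string are examples only)::

    py.test --backends=git,hg --dbs=sqlite,postgres \
        --postgres-connection-string='postgresql://rc:secret@localhost/rc_test' \
        rhodecode/tests/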
|
111 | 111 | |
|
112 | 112 | def pytest_configure(config): |
|
113 | 113 | from rhodecode.config import patches |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | def pytest_collection_modifyitems(session, config, items): |
|
117 | 117 | # drop items flagged `__test__ = False` (nose compatibility, kept for the nose-to-pytest transition) |
|
118 | 118 | remaining = [ |
|
119 | 119 | i for i in items if getattr(i.obj, '__test__', True)] |
|
120 | 120 | items[:] = remaining |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def pytest_generate_tests(metafunc): |
|
124 | 124 | # Support test generation based on --backend parameter |
|
125 | 125 | if 'backend_alias' in metafunc.fixturenames: |
|
126 | 126 | backends = get_backends_from_metafunc(metafunc) |
|
127 | 127 | scope = None |
|
128 | 128 | if not backends: |
|
129 | 129 | pytest.skip("Not enabled for any of selected backends") |
|
130 | 130 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
131 | 131 | elif hasattr(metafunc.function, 'backends'): |
|
132 | 132 | backends = get_backends_from_metafunc(metafunc) |
|
133 | 133 | if not backends: |
|
134 | 134 | pytest.skip("Not enabled for any of selected backends") |
|
135 | 135 | |
|
136 | 136 | |
|
137 | 137 | def get_backends_from_metafunc(metafunc): |
|
138 | 138 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
139 | 139 | if hasattr(metafunc.function, 'backends'): |
|
140 | 140 | # Supported backends by this test function, created from |
|
141 | 141 | # pytest.mark.backends |
|
142 | 142 | backends = metafunc.function.backends.args |
|
143 | 143 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
144 | 144 | # Support class attribute "backend_alias", this is mainly |
|
145 | 145 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
146 | 146 | backends = [metafunc.cls.backend_alias] |
|
147 | 147 | else: |
|
148 | 148 | backends = metafunc.config.getoption('--backends') |
|
149 | 149 | return requested_backends.intersection(backends) |
|
150 | 150 | |
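A test opts in to specific backends through the marker this helper reads; a minimal sketch::

    @pytest.mark.backends('git', 'hg')
    def test_backend_specific(backend):
        # generated once per backend that is both marked and listed in --backends
        assert backend.alias in ('git', 'hg')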
|
151 | 151 | |
|
152 | 152 | @pytest.fixture(scope='session', autouse=True) |
|
153 | 153 | def activate_example_rcextensions(request): |
|
154 | 154 | """ |
|
155 | 155 | Patch in an example rcextensions module which verifies passed in kwargs. |
|
156 | 156 | """ |
|
157 | 157 | from rhodecode.tests.other import example_rcextensions |
|
158 | 158 | |
|
159 | 159 | old_extensions = rhodecode.EXTENSIONS |
|
160 | 160 | rhodecode.EXTENSIONS = example_rcextensions |
|
161 | 161 | |
|
162 | 162 | @request.addfinalizer |
|
163 | 163 | def cleanup(): |
|
164 | 164 | rhodecode.EXTENSIONS = old_extensions |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | @pytest.fixture |
|
168 | 168 | def capture_rcextensions(): |
|
169 | 169 | """ |
|
170 | 170 | Returns the recorded calls to entry points in rcextensions. |
|
171 | 171 | """ |
|
172 | 172 | calls = rhodecode.EXTENSIONS.calls |
|
173 | 173 | calls.clear() |
|
174 | 174 | # Note: At this moment, it is still the empty dict, but that will |
|
175 | 175 | # be filled during the test run and since it is a reference this |
|
176 | 176 | # is enough to make it work. |
|
177 | 177 | return calls |
|
178 | 178 | |
|
179 | 179 | |
|
180 | 180 | @pytest.fixture(scope='session') |
|
181 | 181 | def http_environ_session(): |
|
182 | 182 | """ |
|
183 | 183 | Allows using "http_environ" in session scope. |
|
184 | 184 | """ |
|
185 | 185 | return http_environ( |
|
186 | 186 | http_host_stub=http_host_stub()) |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | @pytest.fixture |
|
190 | 190 | def http_host_stub(): |
|
191 | 191 | """ |
|
192 | 192 | Value of HTTP_HOST in the test run. |
|
193 | 193 | """ |
|
194 | 194 | return 'example.com:80' |
|
195 | 195 | |
|
196 | 196 | |
|
197 | 197 | @pytest.fixture |
|
198 | 198 | def http_host_only_stub(): |
|
199 | 199 | """ |
|
200 | 200 | Value of HTTP_HOST in the test run. |
|
201 | 201 | """ |
|
202 | 202 | return http_host_stub().split(':')[0] |
|
203 | 203 | |
|
204 | 204 | |
|
205 | 205 | @pytest.fixture |
|
206 | 206 | def http_environ(http_host_stub): |
|
207 | 207 | """ |
|
208 | 208 | HTTP extra environ keys. |
|
209 | 209 | |
|
210 | 210 | Used by the test application as well as for setting up the pylons |
|
211 | 211 | environment. In the case of the fixture "app" it should be possible |
|
212 | 212 | to override this for a specific test case. |
|
213 | 213 | """ |
|
214 | 214 | return { |
|
215 | 215 | 'SERVER_NAME': http_host_only_stub(), |
|
216 | 216 | 'SERVER_PORT': http_host_stub.split(':')[1], |
|
217 | 217 | 'HTTP_HOST': http_host_stub, |
|
218 | 218 | 'HTTP_USER_AGENT': 'rc-test-agent', |
|
219 | 219 | 'REQUEST_METHOD': 'GET' |
|
220 | 220 | } |
|
221 | 221 | |
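Since `app` builds its test application from this dict, a test module can shadow the fixture to simulate another host (a sketch, not an existing test)::

    @pytest.fixture
    def http_environ(http_host_stub):
        # keys mirror the session-wide defaults above
        return {
            'SERVER_NAME': 'proxy.example.org',
            'SERVER_PORT': '8080',
            'HTTP_HOST': 'proxy.example.org:8080',
            'HTTP_USER_AGENT': 'rc-test-agent',
            'REQUEST_METHOD': 'GET',
        }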
|
222 | 222 | |
|
223 | 223 | @pytest.fixture(scope='session') |
|
224 | 224 | def baseapp(ini_config, vcsserver, http_environ_session): |
|
225 | 225 | from rhodecode.lib.pyramid_utils import get_app_config |
|
226 | 226 | from rhodecode.config.middleware import make_pyramid_app |
|
227 | 227 | |
|
228 | 228 | print("Using the RhodeCode configuration:{}".format(ini_config)) |
|
229 | 229 | pyramid.paster.setup_logging(ini_config) |
|
230 | 230 | |
|
231 | 231 | settings = get_app_config(ini_config) |
|
232 | 232 | app = make_pyramid_app({'__file__': ini_config}, **settings) |
|
233 | 233 | |
|
234 | 234 | return app |
|
235 | 235 | |
|
236 | 236 | |
|
237 | 237 | @pytest.fixture(scope='function') |
|
238 | 238 | def app(request, config_stub, baseapp, http_environ): |
|
239 | 239 | app = CustomTestApp( |
|
240 | 240 | baseapp, |
|
241 | 241 | extra_environ=http_environ) |
|
242 | 242 | if request.cls: |
|
243 | 243 | request.cls.app = app |
|
244 | 244 | return app |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | @pytest.fixture(scope='session') |
|
248 | 248 | def app_settings(baseapp, ini_config): |
|
249 | 249 | """ |
|
250 | 250 | Settings dictionary used to create the app. |
|
251 | 251 | |
|
252 | 252 | Parses the ini file and passes the result through the sanitize and apply |
|
253 | 253 | defaults mechanism in `rhodecode.config.middleware`. |
|
254 | 254 | """ |
|
255 | 255 | return baseapp.config.get_settings() |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | @pytest.fixture(scope='session') |
|
259 | 259 | def db_connection(ini_settings): |
|
260 | 260 | # Initialize the database connection. |
|
261 | 261 | config_utils.initialize_database(ini_settings) |
|
262 | 262 | |
|
263 | 263 | |
|
264 | 264 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
265 | 265 | |
|
266 | 266 | |
|
267 | 267 | def _autologin_user(app, *args): |
|
268 | 268 | session = login_user_session(app, *args) |
|
269 | 269 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
270 | 270 | return LoginData(csrf_token, session['rhodecode_user']) |
|
271 | 271 | |
|
272 | 272 | |
|
273 | 273 | @pytest.fixture |
|
274 | 274 | def autologin_user(app): |
|
275 | 275 | """ |
|
276 | 276 | Utility fixture which makes sure that the admin user is logged in |
|
277 | 277 | """ |
|
278 | 278 | return _autologin_user(app) |
|
279 | 279 | |
|
280 | 280 | |
|
281 | 281 | @pytest.fixture |
|
282 | 282 | def autologin_regular_user(app): |
|
283 | 283 | """ |
|
284 | 284 | Utility fixture which makes sure that the regular user is logged in |
|
285 | 285 | """ |
|
286 | 286 | return _autologin_user( |
|
287 | 287 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
288 | 288 | |
|
289 | 289 | |
|
290 | 290 | @pytest.fixture(scope='function') |
|
291 | 291 | def csrf_token(request, autologin_user): |
|
292 | 292 | return autologin_user.csrf_token |
|
293 | 293 | |
|
294 | 294 | |
|
295 | 295 | @pytest.fixture(scope='function') |
|
296 | 296 | def xhr_header(request): |
|
297 | 297 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
298 | 298 | |
|
299 | 299 | |
|
300 | 300 | @pytest.fixture |
|
301 | 301 | def real_crypto_backend(monkeypatch): |
|
302 | 302 | """ |
|
303 | 303 | Switch the production crypto backend on for this test. |
|
304 | 304 | |
|
305 | 305 | During the test run the crypto backend is replaced with a faster |
|
306 | 306 | implementation based on the MD5 algorithm. |
|
307 | 307 | """ |
|
308 | 308 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
309 | 309 | |
|
310 | 310 | |
|
311 | 311 | @pytest.fixture(scope='class') |
|
312 | 312 | def index_location(request, baseapp): |
|
313 | 313 | index_location = baseapp.config.get_settings()['search.location'] |
|
314 | 314 | if request.cls: |
|
315 | 315 | request.cls.index_location = index_location |
|
316 | 316 | return index_location |
|
317 | 317 | |
|
318 | 318 | |
|
319 | 319 | @pytest.fixture(scope='session', autouse=True) |
|
320 | 320 | def tests_tmp_path(request): |
|
321 | 321 | """ |
|
322 | 322 | Create temporary directory to be used during the test session. |
|
323 | 323 | """ |
|
324 | 324 | if not os.path.exists(TESTS_TMP_PATH): |
|
325 | 325 | os.makedirs(TESTS_TMP_PATH) |
|
326 | 326 | |
|
327 | 327 | if not request.config.getoption('--keep-tmp-path'): |
|
328 | 328 | @request.addfinalizer |
|
329 | 329 | def remove_tmp_path(): |
|
330 | 330 | shutil.rmtree(TESTS_TMP_PATH) |
|
331 | 331 | |
|
332 | 332 | return TESTS_TMP_PATH |
|
333 | 333 | |
|
334 | 334 | |
|
335 | 335 | @pytest.fixture |
|
336 | 336 | def test_repo_group(request): |
|
337 | 337 | """ |
|
338 | 338 | Create a temporary repository group, and destroy it after |
|
339 | 339 | usage automatically |
|
340 | 340 | """ |
|
341 | 341 | fixture = Fixture() |
|
342 | 342 | repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') |
|
343 | 343 | repo_group = fixture.create_repo_group(repogroupid) |
|
344 | 344 | |
|
345 | 345 | def _cleanup(): |
|
346 | 346 | fixture.destroy_repo_group(repogroupid) |
|
347 | 347 | |
|
348 | 348 | request.addfinalizer(_cleanup) |
|
349 | 349 | return repo_group |
|
350 | 350 | |
|
351 | 351 | |
|
352 | 352 | @pytest.fixture |
|
353 | 353 | def test_user_group(request): |
|
354 | 354 | """ |
|
355 | 355 | Create a temporary user group, and destroy it after |
|
356 | 356 | usage automatically |
|
357 | 357 | """ |
|
358 | 358 | fixture = Fixture() |
|
359 | 359 | usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') |
|
360 | 360 | user_group = fixture.create_user_group(usergroupid) |
|
361 | 361 | |
|
362 | 362 | def _cleanup(): |
|
363 | 363 | fixture.destroy_user_group(user_group) |
|
364 | 364 | |
|
365 | 365 | request.addfinalizer(_cleanup) |
|
366 | 366 | return user_group |
|
367 | 367 | |
|
368 | 368 | |
|
369 | 369 | @pytest.fixture(scope='session') |
|
370 | 370 | def test_repo(request): |
|
371 | 371 | container = TestRepoContainer() |
|
372 | 372 | request.addfinalizer(container._cleanup) |
|
373 | 373 | return container |
|
374 | 374 | |
|
375 | 375 | |
|
376 | 376 | class TestRepoContainer(object): |
|
377 | 377 | """ |
|
378 | 378 | Container for test repositories which are used read only. |
|
379 | 379 | |
|
380 | 380 | Repositories will be created on demand and re-used during the lifetime |
|
381 | 381 | of this object. |
|
382 | 382 | |
|
383 | 383 | Usage to get the svn test repository "minimal":: |
|
384 | 384 | |
|
385 | 385 | test_repo = TestRepoContainer() |
|
386 | 386 | repo = test_repo('minimal', 'svn') |
|
387 | 387 | |
|
388 | 388 | """ |
|
389 | 389 | |
|
390 | 390 | dump_extractors = { |
|
391 | 391 | 'git': utils.extract_git_repo_from_dump, |
|
392 | 392 | 'hg': utils.extract_hg_repo_from_dump, |
|
393 | 393 | 'svn': utils.extract_svn_repo_from_dump, |
|
394 | 394 | } |
|
395 | 395 | |
|
396 | 396 | def __init__(self): |
|
397 | 397 | self._cleanup_repos = [] |
|
398 | 398 | self._fixture = Fixture() |
|
399 | 399 | self._repos = {} |
|
400 | 400 | |
|
401 | 401 | def __call__(self, dump_name, backend_alias, config=None): |
|
402 | 402 | key = (dump_name, backend_alias) |
|
403 | 403 | if key not in self._repos: |
|
404 | 404 | repo = self._create_repo(dump_name, backend_alias, config) |
|
405 | 405 | self._repos[key] = repo.repo_id |
|
406 | 406 | return Repository.get(self._repos[key]) |
|
407 | 407 | |
|
408 | 408 | def _create_repo(self, dump_name, backend_alias, config): |
|
409 | 409 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
410 | 410 | backend_class = get_backend(backend_alias) |
|
411 | 411 | dump_extractor = self.dump_extractors[backend_alias] |
|
412 | 412 | repo_path = dump_extractor(dump_name, repo_name) |
|
413 | 413 | |
|
414 | 414 | vcs_repo = backend_class(repo_path, config=config) |
|
415 | 415 | repo2db_mapper({repo_name: vcs_repo}) |
|
416 | 416 | |
|
417 | 417 | repo = RepoModel().get_by_repo_name(repo_name) |
|
418 | 418 | self._cleanup_repos.append(repo_name) |
|
419 | 419 | return repo |
|
420 | 420 | |
|
421 | 421 | def _cleanup(self): |
|
422 | 422 | for repo_name in reversed(self._cleanup_repos): |
|
423 | 423 | self._fixture.destroy_repo(repo_name) |
|
424 | 424 | |
|
425 | 425 | |
|
426 | 426 | @pytest.fixture |
|
427 | 427 | def backend(request, backend_alias, baseapp, test_repo): |
|
428 | 428 | """ |
|
429 | 429 | Parametrized fixture which represents a single backend implementation. |
|
430 | 430 | |
|
431 | 431 | It respects the option `--backends` to focus the test run on specific |
|
432 | 432 | backend implementations. |
|
433 | 433 | |
|
434 | 434 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
435 | 435 | for specific backends. This is intended as a utility for incremental |
|
436 | 436 | development of a new backend implementation. |
|
437 | 437 | """ |
|
438 | 438 | if backend_alias not in request.config.getoption('--backends'): |
|
439 | 439 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
440 | 440 | |
|
441 | 441 | utils.check_xfail_backends(request.node, backend_alias) |
|
442 | 442 | utils.check_skip_backends(request.node, backend_alias) |
|
443 | 443 | |
|
444 | 444 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
445 | 445 | backend = Backend( |
|
446 | 446 | alias=backend_alias, |
|
447 | 447 | repo_name=repo_name, |
|
448 | 448 | test_name=request.node.name, |
|
449 | 449 | test_repo_container=test_repo) |
|
450 | 450 | request.addfinalizer(backend.cleanup) |
|
451 | 451 | return backend |
|
452 | 452 | |
|
453 | 453 | |
|
454 | 454 | @pytest.fixture |
|
455 | 455 | def backend_git(request, baseapp, test_repo): |
|
456 | 456 | return backend(request, 'git', baseapp, test_repo) |
|
457 | 457 | |
|
458 | 458 | |
|
459 | 459 | @pytest.fixture |
|
460 | 460 | def backend_hg(request, baseapp, test_repo): |
|
461 | 461 | return backend(request, 'hg', baseapp, test_repo) |
|
462 | 462 | |
|
463 | 463 | |
|
464 | 464 | @pytest.fixture |
|
465 | 465 | def backend_svn(request, baseapp, test_repo): |
|
466 | 466 | return backend(request, 'svn', baseapp, test_repo) |
|
467 | 467 | |
|
468 | 468 | |
|
469 | 469 | @pytest.fixture |
|
470 | 470 | def backend_random(backend_git): |
|
471 | 471 | """ |
|
472 | 472 | Use this to express that your tests need "a backend". |
|
473 | 473 | |
|
474 | 474 | A few of our tests need a backend, so that we can run the code. This |
|
475 | 475 | fixture is intended to be used for such cases. It will pick one of the |
|
476 | 476 | backends and run the tests. |
|
477 | 477 | |
|
478 | 478 | The fixture `backend` would run the test multiple times for each |
|
479 | 479 | available backend which is a pure waste of time if the test is |
|
480 | 480 | independent of the backend type. |
|
481 | 481 | """ |
|
482 | 482 | # TODO: johbo: Change this to pick a random backend |
|
483 | 483 | return backend_git |
|
484 | 484 | |
|
485 | 485 | |
|
486 | 486 | @pytest.fixture |
|
487 | 487 | def backend_stub(backend_git): |
|
488 | 488 | """ |
|
489 | 489 | Use this to express that your tests need a backend stub |
|
490 | 490 | |
|
491 | 491 | TODO: mikhail: Implement a real stub logic instead of returning |
|
492 | 492 | a git backend |
|
493 | 493 | """ |
|
494 | 494 | return backend_git |
|
495 | 495 | |
|
496 | 496 | |
|
497 | 497 | @pytest.fixture |
|
498 | 498 | def repo_stub(backend_stub): |
|
499 | 499 | """ |
|
500 | 500 | Use this to express that your tests need a repository stub |
|
501 | 501 | """ |
|
502 | 502 | return backend_stub.create_repo() |
|
503 | 503 | |
|
504 | 504 | |
|
505 | 505 | class Backend(object): |
|
506 | 506 | """ |
|
507 | 507 | Represents the test configuration for one supported backend |
|
508 | 508 | |
|
509 | 509 | Provides easy access to different test repositories based on |
|
510 | 510 | `__getitem__`. Such repositories will only be created once per test |
|
511 | 511 | session. |
|
512 | 512 | """ |
|
513 | 513 | |
|
514 | 514 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
515 | 515 | _master_repo = None |
|
516 | 516 | _commit_ids = {} |
|
517 | 517 | |
|
518 | 518 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
519 | 519 | self.alias = alias |
|
520 | 520 | self.repo_name = repo_name |
|
521 | 521 | self._cleanup_repos = [] |
|
522 | 522 | self._test_name = test_name |
|
523 | 523 | self._test_repo_container = test_repo_container |
|
524 | 524 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or |
|
525 | 525 | # Fixture will survive in the end. |
|
526 | 526 | self._fixture = Fixture() |
|
527 | 527 | |
|
528 | 528 | def __getitem__(self, key): |
|
529 | 529 | return self._test_repo_container(key, self.alias) |
|
530 | 530 | |
|
531 | 531 | def create_test_repo(self, key, config=None): |
|
532 | 532 | return self._test_repo_container(key, self.alias, config) |
|
533 | 533 | |
|
534 | 534 | @property |
|
535 | 535 | def repo(self): |
|
536 | 536 | """ |
|
537 | 537 | Returns the "current" repository. This is the vcs_test repo or the |
|
538 | 538 | last repo which has been created with `create_repo`. |
|
539 | 539 | """ |
|
540 | 540 | from rhodecode.model.db import Repository |
|
541 | 541 | return Repository.get_by_repo_name(self.repo_name) |
|
542 | 542 | |
|
543 | 543 | @property |
|
544 | 544 | def default_branch_name(self): |
|
545 | 545 | VcsRepository = get_backend(self.alias) |
|
546 | 546 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
547 | 547 | |
|
548 | 548 | @property |
|
549 | 549 | def default_head_id(self): |
|
550 | 550 | """ |
|
551 | 551 | Returns the default head id of the underlying backend. |
|
552 | 552 | |
|
553 | 553 | This will be the default branch name in case the backend does have a |
|
554 | 554 | default branch. In the other cases it will point to a valid head |
|
555 | 555 | which can serve as the base to create a new commit on top of it. |
|
556 | 556 | """ |
|
557 | 557 | vcsrepo = self.repo.scm_instance() |
|
558 | 558 | head_id = ( |
|
559 | 559 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
560 | 560 | vcsrepo.commit_ids[-1]) |
|
561 | 561 | return head_id |
|
562 | 562 | |
|
563 | 563 | @property |
|
564 | 564 | def commit_ids(self): |
|
565 | 565 | """ |
|
566 | 566 | Returns the list of commits for the last created repository |
|
567 | 567 | """ |
|
568 | 568 | return self._commit_ids |
|
569 | 569 | |
|
570 | 570 | def create_master_repo(self, commits): |
|
571 | 571 | """ |
|
572 | 572 | Create a repository and remember it as a template. |
|
573 | 573 | |
|
574 | 574 | This allows to easily create derived repositories to construct |
|
575 | 575 | more complex scenarios for diff, compare and pull requests. |
|
576 | 576 | |
|
577 | 577 | Returns a commit map which maps from commit message to raw_id. |
|
578 | 578 | """ |
|
579 | 579 | self._master_repo = self.create_repo(commits=commits) |
|
580 | 580 | return self._commit_ids |
|
581 | 581 | |
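The returned map is what makes derived repositories reproducible; the pattern used by `pr_util` further down is roughly::

    commit_map = backend.create_master_repo(commits)   # message -> raw_id
    target = backend.create_repo(heads=['c1'])         # pulls commit 'c1' in
    source = backend.create_repo(heads=['c2'])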
|
582 | 582 | def create_repo( |
|
583 | 583 | self, commits=None, number_of_commits=0, heads=None, |
|
584 | name_suffix=u'', **kwargs): | |
|
584 | name_suffix=u'', bare=False, **kwargs): | |
|
585 | 585 | """ |
|
586 | 586 | Create a repository and record it for later cleanup. |
|
587 | 587 | |
|
588 | 588 | :param commits: Optional. A sequence of dict instances. |
|
589 | 589 | Will add a commit per entry to the new repository. |
|
590 | 590 | :param number_of_commits: Optional. If set to a number, this number of |
|
591 | 591 | commits will be added to the new repository. |
|
592 | 592 | :param heads: Optional. Can be set to a sequence of commit |
|
593 | 593 | names which shall be pulled in from the master repository. |
|
594 | ||
|
594 | :param name_suffix: adds special suffix to generated repo name | |
|
595 | :param bare: set a repo as bare (no checkout) | |
|
595 | 596 | """ |
|
596 | 597 | self.repo_name = self._next_repo_name() + name_suffix |
|
597 | 598 | repo = self._fixture.create_repo( |
|
598 | self.repo_name, repo_type=self.alias, **kwargs) | |
|
599 | self.repo_name, repo_type=self.alias, bare=bare, **kwargs) | |
|
599 | 600 | self._cleanup_repos.append(repo.repo_name) |
|
600 | 601 | |
|
601 | 602 | commits = commits or [ |
|
602 | 603 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
603 | for x in xrange(number_of_commits)] | |
|
604 | for x in range(number_of_commits)] | |
|
604 | 605 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
605 | 606 | if heads: |
|
606 | 607 | self.pull_heads(repo, heads) |
|
607 | 608 | |
|
608 | 609 | return repo |
|
609 | 610 | |
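The commit dicts accepted here are consumed by `_add_commits_to_repo` below and support the keys `message`, `added`, `changed`, `removed`, `parents`, `author`, `date` and `branch`; for example::

    commits = [
        {'message': 'init', 'added': [FileNode('README.rst', content='docs\n')]},
        {'message': 'update', 'changed': [FileNode('README.rst', content='v2\n')]},
    ]
    repo = backend.create_repo(commits=commits)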
|
610 | 611 | def pull_heads(self, repo, heads): |
|
611 | 612 | """ |
|
612 | 613 | Make sure that repo contains all commits mentioned in `heads` |
|
613 | 614 | """ |
|
614 | 615 | vcsmaster = self._master_repo.scm_instance() |
|
615 | 616 | vcsrepo = repo.scm_instance() |
|
616 | 617 | vcsrepo.config.clear_section('hooks') |
|
617 | 618 | commit_ids = [self._commit_ids[h] for h in heads] |
|
618 | 619 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
619 | 620 | |
|
620 | 621 | def create_fork(self): |
|
621 | 622 | repo_to_fork = self.repo_name |
|
622 | 623 | self.repo_name = self._next_repo_name() |
|
623 | 624 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
624 | 625 | self._cleanup_repos.append(self.repo_name) |
|
625 | 626 | return repo |
|
626 | 627 | |
|
627 | 628 | def new_repo_name(self, suffix=u''): |
|
628 | 629 | self.repo_name = self._next_repo_name() + suffix |
|
629 | 630 | self._cleanup_repos.append(self.repo_name) |
|
630 | 631 | return self.repo_name |
|
631 | 632 | |
|
632 | 633 | def _next_repo_name(self): |
|
633 | 634 | return u"%s_%s" % ( |
|
634 | 635 | self.invalid_repo_name.sub(u'_', self._test_name), |
|
635 | 636 | len(self._cleanup_repos)) |
|
636 | 637 | |
|
637 | 638 | def ensure_file(self, filename, content='Test content\n'): |
|
638 | 639 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
639 | 640 | commits = [ |
|
640 | 641 | {'added': [ |
|
641 | 642 | FileNode(filename, content=content), |
|
642 | 643 | ]}, |
|
643 | 644 | ] |
|
644 | 645 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
645 | 646 | |
|
646 | 647 | def enable_downloads(self): |
|
647 | 648 | repo = self.repo |
|
648 | 649 | repo.enable_downloads = True |
|
649 | 650 | Session().add(repo) |
|
650 | 651 | Session().commit() |
|
651 | 652 | |
|
652 | 653 | def cleanup(self): |
|
653 | 654 | for repo_name in reversed(self._cleanup_repos): |
|
654 | 655 | self._fixture.destroy_repo(repo_name) |
|
655 | 656 | |
|
656 | 657 | def _add_commits_to_repo(self, repo, commits): |
|
657 | 658 | commit_ids = _add_commits_to_repo(repo, commits) |
|
658 | 659 | if not commit_ids: |
|
659 | 660 | return |
|
660 | 661 | self._commit_ids = commit_ids |
|
661 | 662 | |
|
662 | 663 | # Creating refs for Git to allow fetching them from remote repository |
|
663 | 664 | if self.alias == 'git': |
|
664 | 665 | refs = {} |
|
665 | 666 | for message in self._commit_ids: |
|
666 | 667 | # TODO: mikhail: do more special chars replacements |
|
667 | 668 | ref_name = 'refs/test-refs/{}'.format( |
|
668 | 669 | message.replace(' ', '')) |
|
669 | 670 | refs[ref_name] = self._commit_ids[message] |
|
670 | 671 | self._create_refs(repo, refs) |
|
671 | 672 | |
|
672 | 673 | def _create_refs(self, repo, refs): |
|
673 | 674 | for ref_name in refs: |
|
674 | 675 | repo.set_refs(ref_name, refs[ref_name]) |
|
675 | 676 | |
|
676 | 677 | |
|
677 | 678 | @pytest.fixture |
|
678 | 679 | def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo): |
|
679 | 680 | """ |
|
680 | 681 | Parametrized fixture which represents a single vcs backend implementation. |
|
681 | 682 | |
|
682 | 683 | See the fixture `backend` for more details. This one implements the same |
|
683 | 684 | concept, but on vcs level. So it does not provide model instances etc. |
|
684 | 685 | |
|
685 | 686 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
686 | 687 | for how this works. |
|
687 | 688 | """ |
|
688 | 689 | if backend_alias not in request.config.getoption('--backends'): |
|
689 | 690 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
690 | 691 | |
|
691 | 692 | utils.check_xfail_backends(request.node, backend_alias) |
|
692 | 693 | utils.check_skip_backends(request.node, backend_alias) |
|
693 | 694 | |
|
694 | 695 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
695 | 696 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
696 | 697 | backend = VcsBackend( |
|
697 | 698 | alias=backend_alias, |
|
698 | 699 | repo_path=repo_path, |
|
699 | 700 | test_name=request.node.name, |
|
700 | 701 | test_repo_container=test_repo) |
|
701 | 702 | request.addfinalizer(backend.cleanup) |
|
702 | 703 | return backend |
|
703 | 704 | |
|
704 | 705 | |
|
705 | 706 | @pytest.fixture |
|
706 | 707 | def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): |
|
707 | 708 | return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo) |
|
708 | 709 | |
|
709 | 710 | |
|
710 | 711 | @pytest.fixture |
|
711 | 712 | def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): |
|
712 | 713 | return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo) |
|
713 | 714 | |
|
714 | 715 | |
|
715 | 716 | @pytest.fixture |
|
716 | 717 | def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): |
|
717 | 718 | return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo) |
|
718 | 719 | |
|
719 | 720 | |
|
720 | 721 | @pytest.fixture |
|
721 | 722 | def vcsbackend_random(vcsbackend_git): |
|
722 | 723 | """ |
|
723 | 724 | Use this to express that your tests need "a vcsbackend". |
|
724 | 725 | |
|
725 | 726 | The fixture `vcsbackend` would run the test multiple times for each |
|
726 | 727 | available vcs backend which is a pure waste of time if the test is |
|
727 | 728 | independent of the vcs backend type. |
|
728 | 729 | """ |
|
729 | 730 | # TODO: johbo: Change this to pick a random backend |
|
730 | 731 | return vcsbackend_git |
|
731 | 732 | |
|
732 | 733 | |
|
733 | 734 | @pytest.fixture |
|
734 | 735 | def vcsbackend_stub(vcsbackend_git): |
|
735 | 736 | """ |
|
736 | 737 | Use this to express that your test just needs a stub of a vcsbackend. |
|
737 | 738 | |
|
738 | 739 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
739 | 740 | """ |
|
740 | 741 | return vcsbackend_git |
|
741 | 742 | |
|
742 | 743 | |
|
743 | 744 | class VcsBackend(object): |
|
744 | 745 | """ |
|
745 | 746 | Represents the test configuration for one supported vcs backend. |
|
746 | 747 | """ |
|
747 | 748 | |
|
748 | 749 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
749 | 750 | |
|
750 | 751 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
751 | 752 | self.alias = alias |
|
752 | 753 | self._repo_path = repo_path |
|
753 | 754 | self._cleanup_repos = [] |
|
754 | 755 | self._test_name = test_name |
|
755 | 756 | self._test_repo_container = test_repo_container |
|
756 | 757 | |
|
757 | 758 | def __getitem__(self, key): |
|
758 | 759 | return self._test_repo_container(key, self.alias).scm_instance() |
|
759 | 760 | |
|
760 | 761 | @property |
|
761 | 762 | def repo(self): |
|
762 | 763 | """ |
|
763 | 764 | Returns the "current" repository. This is the vcs_test repo or the last |
|
764 | 765 | repo which has been created. |
|
765 | 766 | """ |
|
766 | 767 | Repository = get_backend(self.alias) |
|
767 | 768 | return Repository(self._repo_path) |
|
768 | 769 | |
|
769 | 770 | @property |
|
770 | 771 | def backend(self): |
|
771 | 772 | """ |
|
772 | 773 | Returns the backend implementation class. |
|
773 | 774 | """ |
|
774 | 775 | return get_backend(self.alias) |
|
775 | 776 | |
|
776 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None): | |
|
777 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, | |
|
778 | bare=False): | |
|
777 | 779 | repo_name = self._next_repo_name() |
|
778 | 780 | self._repo_path = get_new_dir(repo_name) |
|
779 | 781 | repo_class = get_backend(self.alias) |
|
780 | 782 | src_url = None |
|
781 | 783 | if _clone_repo: |
|
782 | 784 | src_url = _clone_repo.path |
|
783 | repo = repo_class(self._repo_path, create=True, src_url=src_url) | |
|
785 | repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare) | |
|
784 | 786 | self._cleanup_repos.append(repo) |
|
785 | 787 | |
|
786 | 788 | commits = commits or [ |
|
787 | 789 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
788 | 790 | for x in xrange(number_of_commits)] |
|
789 | 791 | _add_commits_to_repo(repo, commits) |
|
790 | 792 | return repo |
|
791 | 793 | |
|
792 | 794 | def clone_repo(self, repo): |
|
793 | 795 | return self.create_repo(_clone_repo=repo) |
|
794 | 796 | |
|
795 | 797 | def cleanup(self): |
|
796 | 798 | for repo in self._cleanup_repos: |
|
797 | 799 | shutil.rmtree(repo.path) |
|
798 | 800 | |
|
799 | 801 | def new_repo_path(self): |
|
800 | 802 | repo_name = self._next_repo_name() |
|
801 | 803 | self._repo_path = get_new_dir(repo_name) |
|
802 | 804 | return self._repo_path |
|
803 | 805 | |
|
804 | 806 | def _next_repo_name(self): |
|
805 | 807 | return "%s_%s" % ( |
|
806 | 808 | self.invalid_repo_name.sub('_', self._test_name), |
|
807 | 809 | len(self._cleanup_repos)) |
|
808 | 810 | |
|
809 | 811 | def add_file(self, repo, filename, content='Test content\n'): |
|
810 | 812 | imc = repo.in_memory_commit |
|
811 | 813 | imc.add(FileNode(filename, content=content)) |
|
812 | 814 | imc.commit( |
|
813 | 815 | message=u'Automatic commit from vcsbackend fixture', |
|
814 | 816 | author=u'Automatic') |
|
815 | 817 | |
|
816 | 818 | def ensure_file(self, filename, content='Test content\n'): |
|
817 | 819 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
818 | 820 | self.add_file(self.repo, filename, content) |
|
819 | 821 | |
|
820 | 822 | |
|
821 | 823 | def _add_commits_to_repo(vcs_repo, commits): |
|
822 | 824 | commit_ids = {} |
|
823 | 825 | if not commits: |
|
824 | 826 | return commit_ids |
|
825 | 827 | |
|
826 | 828 | imc = vcs_repo.in_memory_commit |
|
827 | 829 | commit = None |
|
828 | 830 | |
|
829 | 831 | for idx, commit in enumerate(commits): |
|
830 | 832 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
831 | 833 | |
|
832 | 834 | for node in commit.get('added', []): |
|
833 | 835 | imc.add(FileNode(node.path, content=node.content)) |
|
834 | 836 | for node in commit.get('changed', []): |
|
835 | 837 | imc.change(FileNode(node.path, content=node.content)) |
|
836 | 838 | for node in commit.get('removed', []): |
|
837 | 839 | imc.remove(FileNode(node.path)) |
|
838 | 840 | |
|
839 | 841 | parents = [ |
|
840 | 842 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
841 | 843 | for p in commit.get('parents', [])] |
|
842 | 844 | |
|
843 | 845 | operations = ('added', 'changed', 'removed') |
|
844 | 846 | if not any((commit.get(o) for o in operations)): |
|
845 | 847 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
846 | 848 | |
|
847 | 849 | commit = imc.commit( |
|
848 | 850 | message=message, |
|
849 | 851 | author=unicode(commit.get('author', 'Automatic')), |
|
850 | 852 | date=commit.get('date'), |
|
851 | 853 | branch=commit.get('branch'), |
|
852 | 854 | parents=parents) |
|
853 | 855 | |
|
854 | 856 | commit_ids[commit.message] = commit.raw_id |
|
855 | 857 | |
|
856 | 858 | return commit_ids |
|
857 | 859 | |
|
858 | 860 | |
|
859 | 861 | @pytest.fixture |
|
860 | 862 | def reposerver(request): |
|
861 | 863 | """ |
|
862 | 864 | Allows serving a backend repository |
|
863 | 865 | """ |
|
864 | 866 | |
|
865 | 867 | repo_server = RepoServer() |
|
866 | 868 | request.addfinalizer(repo_server.cleanup) |
|
867 | 869 | return repo_server |
|
868 | 870 | |
|
869 | 871 | |
|
870 | 872 | class RepoServer(object): |
|
871 | 873 | """ |
|
872 | 874 | Utility to serve a local repository for the duration of a test case. |
|
873 | 875 | |
|
874 | 876 | Supports only Subversion so far. |
|
875 | 877 | """ |
|
876 | 878 | |
|
877 | 879 | url = None |
|
878 | 880 | |
|
879 | 881 | def __init__(self): |
|
880 | 882 | self._cleanup_servers = [] |
|
881 | 883 | |
|
882 | 884 | def serve(self, vcsrepo): |
|
883 | 885 | if vcsrepo.alias != 'svn': |
|
884 | 886 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
885 | 887 | |
|
886 | 888 | proc = subprocess32.Popen( |
|
887 | 889 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
888 | 890 | '--root', vcsrepo.path]) |
|
889 | 891 | self._cleanup_servers.append(proc) |
|
890 | 892 | self.url = 'svn://localhost' |
|
891 | 893 | |
|
892 | 894 | def cleanup(self): |
|
893 | 895 | for proc in self._cleanup_servers: |
|
894 | 896 | proc.terminate() |
|
895 | 897 | |
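Serving a repository in a test then takes two lines (requires `svnserve` on the PATH; the svn repo here comes from e.g. `vcsbackend_svn`)::

    reposerver.serve(vcsbackend_svn.repo)
    checkout_url = reposerver.url   # 'svn://localhost'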
|
896 | 898 | |
|
897 | 899 | @pytest.fixture |
|
898 | 900 | def pr_util(backend, request, config_stub): |
|
899 | 901 | """ |
|
900 | 902 | Utility for tests of models and for functional tests around pull requests. |
|
901 | 903 | |
|
902 | 904 | It gives an instance of :class:`PRTestUtility` which provides various |
|
903 | 905 | utility methods around one pull request. |
|
904 | 906 | |
|
905 | 907 | This fixture uses `backend` and inherits its parameterization. |
|
906 | 908 | """ |
|
907 | 909 | |
|
908 | 910 | util = PRTestUtility(backend) |
|
909 | 911 | request.addfinalizer(util.cleanup) |
|
910 | 912 | |
|
911 | 913 | return util |
|
912 | 914 | |
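Typical use inside a test body, based on the `PRTestUtility` methods below (a sketch)::

    pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
    comment = pr_util.create_comment()
    new_commit_id = pr_util.add_one_commit()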
|
913 | 915 | |
|
914 | 916 | class PRTestUtility(object): |
|
915 | 917 | |
|
916 | 918 | pull_request = None |
|
917 | 919 | pull_request_id = None |
|
918 | 920 | mergeable_patcher = None |
|
919 | 921 | mergeable_mock = None |
|
920 | 922 | notification_patcher = None |
|
921 | 923 | |
|
922 | 924 | def __init__(self, backend): |
|
923 | 925 | self.backend = backend |
|
924 | 926 | |
|
925 | 927 | def create_pull_request( |
|
926 | 928 | self, commits=None, target_head=None, source_head=None, |
|
927 | 929 | revisions=None, approved=False, author=None, mergeable=False, |
|
928 | 930 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
929 | 931 | title=u"Test", description=u"Description"): |
|
930 | 932 | self.set_mergeable(mergeable) |
|
931 | 933 | if not enable_notifications: |
|
932 | 934 | # mock notification side effect |
|
933 | 935 | self.notification_patcher = mock.patch( |
|
934 | 936 | 'rhodecode.model.notification.NotificationModel.create') |
|
935 | 937 | self.notification_patcher.start() |
|
936 | 938 | |
|
937 | 939 | if not self.pull_request: |
|
938 | 940 | if not commits: |
|
939 | 941 | commits = [ |
|
940 | 942 | {'message': 'c1'}, |
|
941 | 943 | {'message': 'c2'}, |
|
942 | 944 | {'message': 'c3'}, |
|
943 | 945 | ] |
|
944 | 946 | target_head = 'c1' |
|
945 | 947 | source_head = 'c2' |
|
946 | 948 | revisions = ['c2'] |
|
947 | 949 | |
|
948 | 950 | self.commit_ids = self.backend.create_master_repo(commits) |
|
949 | 951 | self.target_repository = self.backend.create_repo( |
|
950 | 952 | heads=[target_head], name_suffix=name_suffix) |
|
951 | 953 | self.source_repository = self.backend.create_repo( |
|
952 | 954 | heads=[source_head], name_suffix=name_suffix) |
|
953 | 955 | self.author = author or UserModel().get_by_username( |
|
954 | 956 | TEST_USER_ADMIN_LOGIN) |
|
955 | 957 | |
|
956 | 958 | model = PullRequestModel() |
|
957 | 959 | self.create_parameters = { |
|
958 | 960 | 'created_by': self.author, |
|
959 | 961 | 'source_repo': self.source_repository.repo_name, |
|
960 | 962 | 'source_ref': self._default_branch_reference(source_head), |
|
961 | 963 | 'target_repo': self.target_repository.repo_name, |
|
962 | 964 | 'target_ref': self._default_branch_reference(target_head), |
|
963 | 965 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
964 | 966 | 'reviewers': reviewers or self._get_reviewers(), |
|
965 | 967 | 'title': title, |
|
966 | 968 | 'description': description, |
|
967 | 969 | } |
|
968 | 970 | self.pull_request = model.create(**self.create_parameters) |
|
969 | 971 | assert model.get_versions(self.pull_request) == [] |
|
970 | 972 | |
|
971 | 973 | self.pull_request_id = self.pull_request.pull_request_id |
|
972 | 974 | |
|
973 | 975 | if approved: |
|
974 | 976 | self.approve() |
|
975 | 977 | |
|
976 | 978 | Session().add(self.pull_request) |
|
977 | 979 | Session().commit() |
|
978 | 980 | |
|
979 | 981 | return self.pull_request |
|
980 | 982 | |
|
981 | 983 | def approve(self): |
|
982 | 984 | self.create_status_votes( |
|
983 | 985 | ChangesetStatus.STATUS_APPROVED, |
|
984 | 986 | *self.pull_request.reviewers) |
|
985 | 987 | |
|
986 | 988 | def close(self): |
|
987 | 989 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
988 | 990 | |
|
989 | 991 | def _default_branch_reference(self, commit_message): |
|
990 | 992 | reference = '%s:%s:%s' % ( |
|
991 | 993 | 'branch', |
|
992 | 994 | self.backend.default_branch_name, |
|
993 | 995 | self.commit_ids[commit_message]) |
|
994 | 996 | return reference |
|
995 | 997 | |
|
996 | 998 | def _get_reviewers(self): |
|
997 | 999 | return [ |
|
998 | 1000 | (TEST_USER_REGULAR_LOGIN, ['default1'], False, []), |
|
999 | 1001 | (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []), |
|
1000 | 1002 | ] |
|
1001 | 1003 | |
|
1002 | 1004 | def update_source_repository(self, head=None): |
|
1003 | 1005 | heads = [head or 'c3'] |
|
1004 | 1006 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
1005 | 1007 | |
|
1006 | 1008 | def add_one_commit(self, head=None): |
|
1007 | 1009 | self.update_source_repository(head=head) |
|
1008 | 1010 | old_commit_ids = set(self.pull_request.revisions) |
|
1009 | 1011 | PullRequestModel().update_commits(self.pull_request) |
|
1010 | 1012 | commit_ids = set(self.pull_request.revisions) |
|
1011 | 1013 | new_commit_ids = commit_ids - old_commit_ids |
|
1012 | 1014 | assert len(new_commit_ids) == 1 |
|
1013 | 1015 | return new_commit_ids.pop() |
|
1014 | 1016 | |
|
1015 | 1017 | def remove_one_commit(self): |
|
1016 | 1018 | assert len(self.pull_request.revisions) == 2 |
|
1017 | 1019 | source_vcs = self.source_repository.scm_instance() |
|
1018 | 1020 | removed_commit_id = source_vcs.commit_ids[-1] |
|
1019 | 1021 | |
|
1020 | 1022 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1021 | 1023 | # remove the if once that's sorted out. |
|
1022 | 1024 | if self.backend.alias == "git": |
|
1023 | 1025 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1024 | 1026 | else: |
|
1025 | 1027 | kwargs = {} |
|
1026 | 1028 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1027 | 1029 | |
|
1028 | 1030 | PullRequestModel().update_commits(self.pull_request) |
|
1029 | 1031 | assert len(self.pull_request.revisions) == 1 |
|
1030 | 1032 | return removed_commit_id |
|
1031 | 1033 | |
|
1032 | 1034 | def create_comment(self, linked_to=None): |
|
1033 | 1035 | comment = CommentsModel().create( |
|
1034 | 1036 | text=u"Test comment", |
|
1035 | 1037 | repo=self.target_repository.repo_name, |
|
1036 | 1038 | user=self.author, |
|
1037 | 1039 | pull_request=self.pull_request) |
|
1038 | 1040 | assert comment.pull_request_version_id is None |
|
1039 | 1041 | |
|
1040 | 1042 | if linked_to: |
|
1041 | 1043 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1042 | 1044 | |
|
1043 | 1045 | return comment |
|
1044 | 1046 | |
|
1045 | 1047 | def create_inline_comment( |
|
1046 | 1048 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1047 | 1049 | comment = CommentsModel().create( |
|
1048 | 1050 | text=u"Test comment", |
|
1049 | 1051 | repo=self.target_repository.repo_name, |
|
1050 | 1052 | user=self.author, |
|
1051 | 1053 | line_no=line_no, |
|
1052 | 1054 | f_path=file_path, |
|
1053 | 1055 | pull_request=self.pull_request) |
|
1054 | 1056 | assert comment.pull_request_version_id is None |
|
1055 | 1057 | |
|
1056 | 1058 | if linked_to: |
|
1057 | 1059 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1058 | 1060 | |
|
1059 | 1061 | return comment |
|
1060 | 1062 | |
|
1061 | 1063 | def create_version_of_pull_request(self): |
|
1062 | 1064 | pull_request = self.create_pull_request() |
|
1063 | 1065 | version = PullRequestModel()._create_version_from_snapshot( |
|
1064 | 1066 | pull_request) |
|
1065 | 1067 | return version |
|
1066 | 1068 | |
|
1067 | 1069 | def create_status_votes(self, status, *reviewers): |
|
1068 | 1070 | for reviewer in reviewers: |
|
1069 | 1071 | ChangesetStatusModel().set_status( |
|
1070 | 1072 | repo=self.pull_request.target_repo, |
|
1071 | 1073 | status=status, |
|
1072 | 1074 | user=reviewer.user_id, |
|
1073 | 1075 | pull_request=self.pull_request) |
|
1074 | 1076 | |
|
1075 | 1077 | def set_mergeable(self, value): |
|
1076 | 1078 | if not self.mergeable_patcher: |
|
1077 | 1079 | self.mergeable_patcher = mock.patch.object( |
|
1078 | 1080 | VcsSettingsModel, 'get_general_settings') |
|
1079 | 1081 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1080 | 1082 | self.mergeable_mock.return_value = { |
|
1081 | 1083 | 'rhodecode_pr_merge_enabled': value} |
|
1082 | 1084 | |
|
1083 | 1085 | def cleanup(self): |
|
1084 | 1086 | # In case the source repository is already cleaned up, the pull |
|
1085 | 1087 | # request will already be deleted. |
|
1086 | 1088 | pull_request = PullRequest().get(self.pull_request_id) |
|
1087 | 1089 | if pull_request: |
|
1088 | 1090 | PullRequestModel().delete(pull_request, pull_request.author) |
|
1089 | 1091 | Session().commit() |
|
1090 | 1092 | |
|
1091 | 1093 | if self.notification_patcher: |
|
1092 | 1094 | self.notification_patcher.stop() |
|
1093 | 1095 | |
|
1094 | 1096 | if self.mergeable_patcher: |
|
1095 | 1097 | self.mergeable_patcher.stop() |
|
1096 | 1098 | |
|
1097 | 1099 | |
|
1098 | 1100 | @pytest.fixture |
|
1099 | 1101 | def user_admin(baseapp): |
|
1100 | 1102 | """ |
|
1101 | 1103 | Provides the default admin test user as an instance of `db.User`. |
|
1102 | 1104 | """ |
|
1103 | 1105 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1104 | 1106 | return user |
|
1105 | 1107 | |
|
1106 | 1108 | |
|
1107 | 1109 | @pytest.fixture |
|
1108 | 1110 | def user_regular(baseapp): |
|
1109 | 1111 | """ |
|
1110 | 1112 | Provides the default regular test user as an instance of `db.User`. |
|
1111 | 1113 | """ |
|
1112 | 1114 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1113 | 1115 | return user |
|
1114 | 1116 | |
|
1115 | 1117 | |
|
1116 | 1118 | @pytest.fixture |
|
1117 | 1119 | def user_util(request, db_connection): |
|
1118 | 1120 | """ |
|
1119 | 1121 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1120 | 1122 | """ |
|
1121 | 1123 | utility = UserUtility(test_name=request.node.name) |
|
1122 | 1124 | request.addfinalizer(utility.cleanup) |
|
1123 | 1125 | return utility |
|
1124 | 1126 | |
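`UserUtility` gives tests disposable users, groups and permissions with automatic teardown; a sketch (the permission name is illustrative)::

    user = user_util.create_user()
    repo = user_util.create_repo(owner=user.username)
    user_util.grant_user_permission_to_repo(repo, user, 'repository.write')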
|
1125 | 1127 | |
|
1126 | 1128 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1127 | 1129 | class UserUtility(object): |
|
1128 | 1130 | |
|
1129 | 1131 | def __init__(self, test_name="test"): |
|
1130 | 1132 | self._test_name = self._sanitize_name(test_name) |
|
1131 | 1133 | self.fixture = Fixture() |
|
1132 | 1134 | self.repo_group_ids = [] |
|
1133 | 1135 | self.repos_ids = [] |
|
1134 | 1136 | self.user_ids = [] |
|
1135 | 1137 | self.user_group_ids = [] |
|
1136 | 1138 | self.user_repo_permission_ids = [] |
|
1137 | 1139 | self.user_group_repo_permission_ids = [] |
|
1138 | 1140 | self.user_repo_group_permission_ids = [] |
|
1139 | 1141 | self.user_group_repo_group_permission_ids = [] |
|
1140 | 1142 | self.user_user_group_permission_ids = [] |
|
1141 | 1143 | self.user_group_user_group_permission_ids = [] |
|
1142 | 1144 | self.user_permissions = [] |
|
1143 | 1145 | |
|
1144 | 1146 | def _sanitize_name(self, name): |
|
1145 | 1147 | for char in ['[', ']']: |
|
1146 | 1148 | name = name.replace(char, '_') |
|
1147 | 1149 | return name |
|
1148 | 1150 | |
|
1149 | 1151 | def create_repo_group( |
|
1150 | 1152 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1151 | 1153 | group_name = "{prefix}_repogroup_{count}".format( |
|
1152 | 1154 | prefix=self._test_name, |
|
1153 | 1155 | count=len(self.repo_group_ids)) |
|
1154 | 1156 | repo_group = self.fixture.create_repo_group( |
|
1155 | 1157 | group_name, cur_user=owner) |
|
1156 | 1158 | if auto_cleanup: |
|
1157 | 1159 | self.repo_group_ids.append(repo_group.group_id) |
|
1158 | 1160 | return repo_group |
|
1159 | 1161 | |
|
1160 | 1162 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, |
|
1161 | auto_cleanup=True, repo_type='hg'): | |
|
1163 | auto_cleanup=True, repo_type='hg', bare=False): | |
|
1162 | 1164 | repo_name = "{prefix}_repository_{count}".format( |
|
1163 | 1165 | prefix=self._test_name, |
|
1164 | 1166 | count=len(self.repos_ids)) |
|
1165 | 1167 | |
|
1166 | 1168 | repository = self.fixture.create_repo( |
|
1167 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type) | |
|
1169 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare) | |
|
1168 | 1170 | if auto_cleanup: |
|
1169 | 1171 | self.repos_ids.append(repository.repo_id) |
|
1170 | 1172 | return repository |
|
1171 | 1173 | |
|
1172 | 1174 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1173 | 1175 | user_name = "{prefix}_user_{count}".format( |
|
1174 | 1176 | prefix=self._test_name, |
|
1175 | 1177 | count=len(self.user_ids)) |
|
1176 | 1178 | user = self.fixture.create_user(user_name, **kwargs) |
|
1177 | 1179 | if auto_cleanup: |
|
1178 | 1180 | self.user_ids.append(user.user_id) |
|
1179 | 1181 | return user |
|
1180 | 1182 | |
|
1181 | 1183 | def create_additional_user_email(self, user, email): |
|
1182 | 1184 | uem = self.fixture.create_additional_user_email(user=user, email=email) |
|
1183 | 1185 | return uem |
|
1184 | 1186 | |
|
1185 | 1187 | def create_user_with_group(self): |
|
1186 | 1188 | user = self.create_user() |
|
1187 | 1189 | user_group = self.create_user_group(members=[user]) |
|
1188 | 1190 | return user, user_group |
|
1189 | 1191 | |
|
1190 | 1192 | def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, |
|
1191 | 1193 | auto_cleanup=True, **kwargs): |
|
1192 | 1194 | group_name = "{prefix}_usergroup_{count}".format( |
|
1193 | 1195 | prefix=self._test_name, |
|
1194 | 1196 | count=len(self.user_group_ids)) |
|
1195 | 1197 | user_group = self.fixture.create_user_group( |
|
1196 | 1198 | group_name, cur_user=owner, **kwargs) |
|
1197 | 1199 | |
|
1198 | 1200 | if auto_cleanup: |
|
1199 | 1201 | self.user_group_ids.append(user_group.users_group_id) |
|
1200 | 1202 | if members: |
|
1201 | 1203 | for user in members: |
|
1202 | 1204 | UserGroupModel().add_user_to_group(user_group, user) |
|
1203 | 1205 | return user_group |
|
1204 | 1206 | |
|
1205 | 1207 | def grant_user_permission(self, user_name, permission_name): |
|
1206 | 1208 | self._inherit_default_user_permissions(user_name, False) |
|
1207 | 1209 | self.user_permissions.append((user_name, permission_name)) |
|
1208 | 1210 | |
|
1209 | 1211 | def grant_user_permission_to_repo_group( |
|
1210 | 1212 | self, repo_group, user, permission_name): |
|
1211 | 1213 | permission = RepoGroupModel().grant_user_permission( |
|
1212 | 1214 | repo_group, user, permission_name) |
|
1213 | 1215 | self.user_repo_group_permission_ids.append( |
|
1214 | 1216 | (repo_group.group_id, user.user_id)) |
|
1215 | 1217 | return permission |
|
1216 | 1218 | |
|
1217 | 1219 | def grant_user_group_permission_to_repo_group( |
|
1218 | 1220 | self, repo_group, user_group, permission_name): |
|
1219 | 1221 | permission = RepoGroupModel().grant_user_group_permission( |
|
1220 | 1222 | repo_group, user_group, permission_name) |
|
1221 | 1223 | self.user_group_repo_group_permission_ids.append( |
|
1222 | 1224 | (repo_group.group_id, user_group.users_group_id)) |
|
1223 | 1225 | return permission |
|
1224 | 1226 | |
|
1225 | 1227 | def grant_user_permission_to_repo( |
|
1226 | 1228 | self, repo, user, permission_name): |
|
1227 | 1229 | permission = RepoModel().grant_user_permission( |
|
1228 | 1230 | repo, user, permission_name) |
|
1229 | 1231 | self.user_repo_permission_ids.append( |
|
1230 | 1232 | (repo.repo_id, user.user_id)) |
|
1231 | 1233 | return permission |
|
1232 | 1234 | |
|
1233 | 1235 | def grant_user_group_permission_to_repo( |
|
1234 | 1236 | self, repo, user_group, permission_name): |
|
1235 | 1237 | permission = RepoModel().grant_user_group_permission( |
|
1236 | 1238 | repo, user_group, permission_name) |
|
1237 | 1239 | self.user_group_repo_permission_ids.append( |
|
1238 | 1240 | (repo.repo_id, user_group.users_group_id)) |
|
1239 | 1241 | return permission |
|
1240 | 1242 | |
|
1241 | 1243 | def grant_user_permission_to_user_group( |
|
1242 | 1244 | self, target_user_group, user, permission_name): |
|
1243 | 1245 | permission = UserGroupModel().grant_user_permission( |
|
1244 | 1246 | target_user_group, user, permission_name) |
|
1245 | 1247 | self.user_user_group_permission_ids.append( |
|
1246 | 1248 | (target_user_group.users_group_id, user.user_id)) |
|
1247 | 1249 | return permission |
|
1248 | 1250 | |
|
1249 | 1251 | def grant_user_group_permission_to_user_group( |
|
1250 | 1252 | self, target_user_group, user_group, permission_name): |
|
1251 | 1253 | permission = UserGroupModel().grant_user_group_permission( |
|
1252 | 1254 | target_user_group, user_group, permission_name) |
|
1253 | 1255 | self.user_group_user_group_permission_ids.append( |
|
1254 | 1256 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1255 | 1257 | return permission |
|
1256 | 1258 | |
|
1257 | 1259 | def revoke_user_permission(self, user_name, permission_name): |
|
1258 | 1260 | self._inherit_default_user_permissions(user_name, True) |
|
1259 | 1261 | UserModel().revoke_perm(user_name, permission_name) |
|
1260 | 1262 | |
|
1261 | 1263 | def _inherit_default_user_permissions(self, user_name, value): |
|
1262 | 1264 | user = UserModel().get_by_username(user_name) |
|
1263 | 1265 | user.inherit_default_permissions = value |
|
1264 | 1266 | Session().add(user) |
|
1265 | 1267 | Session().commit() |
|
1266 | 1268 | |
|
1267 | 1269 | def cleanup(self): |
|
1268 | 1270 | self._cleanup_permissions() |
|
1269 | 1271 | self._cleanup_repos() |
|
1270 | 1272 | self._cleanup_repo_groups() |
|
1271 | 1273 | self._cleanup_user_groups() |
|
1272 | 1274 | self._cleanup_users() |
|
1273 | 1275 | |
|
1274 | 1276 | def _cleanup_permissions(self): |
|
1275 | 1277 | if self.user_permissions: |
|
1276 | 1278 | for user_name, permission_name in self.user_permissions: |
|
1277 | 1279 | self.revoke_user_permission(user_name, permission_name) |
|
1278 | 1280 | |
|
1279 | 1281 | for permission in self.user_repo_permission_ids: |
|
1280 | 1282 | RepoModel().revoke_user_permission(*permission) |
|
1281 | 1283 | |
|
1282 | 1284 | for permission in self.user_group_repo_permission_ids: |
|
1283 | 1285 | RepoModel().revoke_user_group_permission(*permission) |
|
1284 | 1286 | |
|
1285 | 1287 | for permission in self.user_repo_group_permission_ids: |
|
1286 | 1288 | RepoGroupModel().revoke_user_permission(*permission) |
|
1287 | 1289 | |
|
1288 | 1290 | for permission in self.user_group_repo_group_permission_ids: |
|
1289 | 1291 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1290 | 1292 | |
|
1291 | 1293 | for permission in self.user_user_group_permission_ids: |
|
1292 | 1294 | UserGroupModel().revoke_user_permission(*permission) |
|
1293 | 1295 | |
|
1294 | 1296 | for permission in self.user_group_user_group_permission_ids: |
|
1295 | 1297 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1296 | 1298 | |
|
1297 | 1299 | def _cleanup_repo_groups(self): |
|
1298 | 1300 | def _repo_group_compare(first_group_id, second_group_id): |
|
1299 | 1301 | """ |
|
1300 | 1302 | Gives higher priority to the groups with the most complex paths |
|
1301 | 1303 | """ |
|
1302 | 1304 | first_group = RepoGroup.get(first_group_id) |
|
1303 | 1305 | second_group = RepoGroup.get(second_group_id) |
|
1304 | 1306 | first_group_parts = ( |
|
1305 | 1307 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1306 | 1308 | second_group_parts = ( |
|
1307 | 1309 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1308 | 1310 | return cmp(second_group_parts, first_group_parts) |
|
1309 | 1311 | |
|
1310 | 1312 | sorted_repo_group_ids = sorted( |
|
1311 | 1313 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1312 | 1314 | for repo_group_id in sorted_repo_group_ids: |
|
1313 | 1315 | self.fixture.destroy_repo_group(repo_group_id) |
|
1314 | 1316 | |
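The ordering above relies on `cmp()` and the `cmp=` argument to `sorted()`, which exist only on Python 2. A hypothetical key-based sketch of the same "destroy the most deeply nested groups first" ordering, usable on Python 3:

    def _path_depth(group_id):
        # deeper groups have more '/'-separated name parts
        group = RepoGroup.get(group_id)
        return len(group.group_name.split('/')) if group else 0

    sorted_repo_group_ids = sorted(
        self.repo_group_ids, key=_path_depth, reverse=True)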
|
1315 | 1317 | def _cleanup_repos(self): |
|
1316 | 1318 | sorted_repos_ids = sorted(self.repos_ids) |
|
1317 | 1319 | for repo_id in sorted_repos_ids: |
|
1318 | 1320 | self.fixture.destroy_repo(repo_id) |
|
1319 | 1321 | |
|
1320 | 1322 | def _cleanup_user_groups(self): |
|
1321 | 1323 | def _user_group_compare(first_group_id, second_group_id): |
|
1322 | 1324 | """ |
|
1323 | 1325 | Gives higher priority to the groups with the most complex paths |
|
1324 | 1326 | """ |
|
1325 | 1327 | first_group = UserGroup.get(first_group_id) |
|
1326 | 1328 | second_group = UserGroup.get(second_group_id) |
|
1327 | 1329 | first_group_parts = ( |
|
1328 | 1330 | len(first_group.users_group_name.split('/')) |
|
1329 | 1331 | if first_group else 0) |
|
1330 | 1332 | second_group_parts = ( |
|
1331 | 1333 | len(second_group.users_group_name.split('/')) |
|
1332 | 1334 | if second_group else 0) |
|
1333 | 1335 | return cmp(second_group_parts, first_group_parts) |
|
1334 | 1336 | |
|
1335 | 1337 | sorted_user_group_ids = sorted( |
|
1336 | 1338 | self.user_group_ids, cmp=_user_group_compare) |
|
1337 | 1339 | for user_group_id in sorted_user_group_ids: |
|
1338 | 1340 | self.fixture.destroy_user_group(user_group_id) |
|
1339 | 1341 | |
|
1340 | 1342 | def _cleanup_users(self): |
|
1341 | 1343 | for user_id in self.user_ids: |
|
1342 | 1344 | self.fixture.destroy_user(user_id) |
|
1343 | 1345 | |
|
1344 | 1346 | |
|
1345 | 1347 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1346 | 1348 | # pytest plugin |
|
1347 | 1349 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1348 | 1350 | def pytest_runtest_makereport(item, call): |
|
1349 | 1351 | """ |
|
1350 | 1352 | Adding the remote traceback if the exception has this information. |
|
1351 | 1353 | |
|
1352 | 1354 | VCSServer attaches this information as the attribute `_vcs_server_traceback` |
|
1353 | 1355 | to the exception instance. |
|
1354 | 1356 | """ |
|
1355 | 1357 | outcome = yield |
|
1356 | 1358 | report = outcome.get_result() |
|
1357 | 1359 | if call.excinfo: |
|
1358 | 1360 | _add_vcsserver_remote_traceback(report, call.excinfo.value) |
|
1359 | 1361 | |
|
1360 | 1362 | |
|
1361 | 1363 | def _add_vcsserver_remote_traceback(report, exc): |
|
1362 | 1364 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) |
|
1363 | 1365 | |
|
1364 | 1366 | if vcsserver_traceback: |
|
1365 | 1367 | section = 'VCSServer remote traceback ' + report.when |
|
1366 | 1368 | report.sections.append((section, vcsserver_traceback)) |
|
1367 | 1369 | |
|
1368 | 1370 | |
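A hypothetical illustration of the hook pair above: any test failure whose exception carries a `_vcs_server_traceback` attribute gains an extra report section (the traceback text here is made up).

    def test_remote_failure():
        exc = RuntimeError('remote call failed')
        # VCSServer would normally attach this attribute itself
        exc._vcs_server_traceback = 'Traceback (most recent call last): ...'
        raise exc

The failure report for this test then contains a section named 'VCSServer remote traceback call' holding the attached text.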
|
1369 | 1371 | @pytest.fixture(scope='session') |
|
1370 | 1372 | def testrun(): |
|
1371 | 1373 | return { |
|
1372 | 1374 | 'uuid': uuid.uuid4(), |
|
1373 | 1375 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1374 | 1376 | 'timestamp': int(time.time()), |
|
1375 | 1377 | } |
|
1376 | 1378 | |
|
1377 | 1379 | |
|
1378 | 1380 | @pytest.fixture(autouse=True) |
|
1379 | 1381 | def collect_appenlight_stats(request, testrun): |
|
1380 | 1382 | """ |
|
1381 | 1383 | This fixture reports memory consumption of single tests. |
|
1382 | 1384 | |
|
1383 | 1385 | It gathers data based on `psutil` and sends them to Appenlight. The option |

1384 | 1386 | ``--appenlight`` has to be used to enable this fixture and the API key for your |

1385 | 1387 | application has to be provided in ``--appenlight-api-key``. |
|
1386 | 1388 | """ |
|
1387 | 1389 | try: |
|
1388 | 1390 | # psutil is not yet supported on cygwin. |
|
1389 | 1391 | import psutil |
|
1390 | 1392 | except ImportError: |
|
1391 | 1393 | return |
|
1392 | 1394 | |
|
1393 | 1395 | if not request.config.getoption('--appenlight'): |
|
1394 | 1396 | return |
|
1395 | 1397 | else: |
|
1396 | 1398 | # Only request the baseapp fixture if appenlight tracking is |
|
1397 | 1399 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1398 | 1400 | # seconds if appenlight is not enabled. |
|
1399 | 1401 | baseapp = request.getfuncargvalue("baseapp") |
|
1400 | 1402 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1401 | 1403 | client = AppenlightClient( |
|
1402 | 1404 | url=url, |
|
1403 | 1405 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1404 | 1406 | namespace=request.node.nodeid, |
|
1405 | 1407 | request=str(testrun['uuid']), |
|
1406 | 1408 | testrun=testrun) |
|
1407 | 1409 | |
|
1408 | 1410 | client.collect({ |
|
1409 | 1411 | 'message': "Starting", |
|
1410 | 1412 | }) |
|
1411 | 1413 | |
|
1412 | 1414 | server_and_port = baseapp.config.get_settings()['vcs.server'] |
|
1413 | 1415 | protocol = baseapp.config.get_settings()['vcs.server.protocol'] |
|
1414 | 1416 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
1415 | 1417 | with server: |
|
1416 | 1418 | vcs_pid = server.get_pid() |
|
1417 | 1419 | server.run_gc() |
|
1418 | 1420 | vcs_process = psutil.Process(vcs_pid) |
|
1419 | 1421 | mem = vcs_process.memory_info() |
|
1420 | 1422 | client.tag_before('vcsserver.rss', mem.rss) |
|
1421 | 1423 | client.tag_before('vcsserver.vms', mem.vms) |
|
1422 | 1424 | |
|
1423 | 1425 | test_process = psutil.Process() |
|
1424 | 1426 | mem = test_process.memory_info() |
|
1425 | 1427 | client.tag_before('test.rss', mem.rss) |
|
1426 | 1428 | client.tag_before('test.vms', mem.vms) |
|
1427 | 1429 | |
|
1428 | 1430 | client.tag_before('time', time.time()) |
|
1429 | 1431 | |
|
1430 | 1432 | @request.addfinalizer |
|
1431 | 1433 | def send_stats(): |
|
1432 | 1434 | client.tag_after('time', time.time()) |
|
1433 | 1435 | with server: |
|
1434 | 1436 | gc_stats = server.run_gc() |
|
1435 | 1437 | for tag, value in gc_stats.items(): |
|
1436 | 1438 | client.tag_after(tag, value) |
|
1437 | 1439 | mem = vcs_process.memory_info() |
|
1438 | 1440 | client.tag_after('vcsserver.rss', mem.rss) |
|
1439 | 1441 | client.tag_after('vcsserver.vms', mem.vms) |
|
1440 | 1442 | |
|
1441 | 1443 | mem = test_process.memory_info() |
|
1442 | 1444 | client.tag_after('test.rss', mem.rss) |
|
1443 | 1445 | client.tag_after('test.vms', mem.vms) |
|
1444 | 1446 | |
|
1445 | 1447 | client.collect({ |
|
1446 | 1448 | 'message': "Finished", |
|
1447 | 1449 | }) |
|
1448 | 1450 | client.send_stats() |
|
1449 | 1451 | |
|
1450 | 1452 | return client |
|
1451 | 1453 | |
|
1452 | 1454 | |
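Based on the options read inside the fixture body, enabling the reporting would look roughly like this; the URL and key are placeholders:

    py.test --appenlight \
        --appenlight-url=https://appenlight.example.com \
        --appenlight-api-key=YOUR_API_KEY \
        rhodecode/tests/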
|
1453 | 1455 | class AppenlightClient(object): |
|
1454 | 1456 | |
|
1455 | 1457 | url_template = '{url}?protocol_version=0.5' |
|
1456 | 1458 | |
|
1457 | 1459 | def __init__( |
|
1458 | 1460 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1459 | 1461 | namespace=None, request=None, testrun=None): |
|
1460 | 1462 | self.url = self.url_template.format(url=url) |
|
1461 | 1463 | self.api_key = api_key |
|
1462 | 1464 | self.add_server = add_server |
|
1463 | 1465 | self.add_timestamp = add_timestamp |
|
1464 | 1466 | self.namespace = namespace |
|
1465 | 1467 | self.request = request |
|
1466 | 1468 | self.server = socket.getfqdn(socket.gethostname()) |
|
1467 | 1469 | self.tags_before = {} |
|
1468 | 1470 | self.tags_after = {} |
|
1469 | 1471 | self.stats = [] |
|
1470 | 1472 | self.testrun = testrun or {} |
|
1471 | 1473 | |
|
1472 | 1474 | def tag_before(self, tag, value): |
|
1473 | 1475 | self.tags_before[tag] = value |
|
1474 | 1476 | |
|
1475 | 1477 | def tag_after(self, tag, value): |
|
1476 | 1478 | self.tags_after[tag] = value |
|
1477 | 1479 | |
|
1478 | 1480 | def collect(self, data): |
|
1479 | 1481 | if self.add_server: |
|
1480 | 1482 | data.setdefault('server', self.server) |
|
1481 | 1483 | if self.add_timestamp: |
|
1482 | 1484 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1483 | 1485 | if self.namespace: |
|
1484 | 1486 | data.setdefault('namespace', self.namespace) |
|
1485 | 1487 | if self.request: |
|
1486 | 1488 | data.setdefault('request', self.request) |
|
1487 | 1489 | self.stats.append(data) |
|
1488 | 1490 | |
|
1489 | 1491 | def send_stats(self): |
|
1490 | 1492 | tags = [ |
|
1491 | 1493 | ('testrun', self.request), |
|
1492 | 1494 | ('testrun.start', self.testrun['start']), |
|
1493 | 1495 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1494 | 1496 | ('test', self.namespace), |
|
1495 | 1497 | ] |
|
1496 | 1498 | for key, value in self.tags_before.items(): |
|
1497 | 1499 | tags.append((key + '.before', value)) |
|
1498 | 1500 | try: |
|
1499 | 1501 | delta = self.tags_after[key] - value |
|
1500 | 1502 | tags.append((key + '.delta', delta)) |
|
1501 | 1503 | except Exception: |
|
1502 | 1504 | pass |
|
1503 | 1505 | for key, value in self.tags_after.items(): |
|
1504 | 1506 | tags.append((key + '.after', value)) |
|
1505 | 1507 | self.collect({ |
|
1506 | 1508 | 'message': "Collected tags", |
|
1507 | 1509 | 'tags': tags, |
|
1508 | 1510 | }) |
|
1509 | 1511 | |
|
1510 | 1512 | response = requests.post( |
|
1511 | 1513 | self.url, |
|
1512 | 1514 | headers={ |
|
1513 | 1515 | 'X-appenlight-api-key': self.api_key}, |
|
1514 | 1516 | json=self.stats, |
|
1515 | 1517 | ) |
|
1516 | 1518 | |
|
1517 | 1519 | if response.status_code != 200: |
|
1518 | 1520 | pprint.pprint(self.stats) |
|
1519 | 1521 | print(response.headers) |
|
1520 | 1522 | print(response.text) |
|
1521 | 1523 | raise Exception('Sending to appenlight failed') |
|
1522 | 1524 | |
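A minimal usage sketch of the before/after bookkeeping, assuming a reachable endpoint and dummy values; `send_stats()` emits `.before` and `.after` tags plus a `.delta` tag wherever subtraction works:

    client = AppenlightClient(
        url='http://localhost:6543/api/logs', api_key='dummy-key',
        testrun={'start': '2018-01-01T00:00:00', 'timestamp': 1514764800})
    client.tag_before('test.rss', 100)
    client.tag_after('test.rss', 150)
    # send_stats() would report test.rss.before=100, test.rss.after=150
    # and test.rss.delta=50 in a single POST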
|
1523 | 1525 | |
|
1524 | 1526 | @pytest.fixture |
|
1525 | 1527 | def gist_util(request, db_connection): |
|
1526 | 1528 | """ |
|
1527 | 1529 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1528 | 1530 | """ |
|
1529 | 1531 | utility = GistUtility() |
|
1530 | 1532 | request.addfinalizer(utility.cleanup) |
|
1531 | 1533 | return utility |
|
1532 | 1534 | |
|
1533 | 1535 | |
|
1534 | 1536 | class GistUtility(object): |
|
1535 | 1537 | def __init__(self): |
|
1536 | 1538 | self.fixture = Fixture() |
|
1537 | 1539 | self.gist_ids = [] |
|
1538 | 1540 | |
|
1539 | 1541 | def create_gist(self, **kwargs): |
|
1540 | 1542 | gist = self.fixture.create_gist(**kwargs) |
|
1541 | 1543 | self.gist_ids.append(gist.gist_id) |
|
1542 | 1544 | return gist |
|
1543 | 1545 | |
|
1544 | 1546 | def cleanup(self): |
|
1545 | 1547 | for id_ in self.gist_ids: |
|
1546 | 1548 | self.fixture.destroy_gists(str(id_)) |
|
1547 | 1549 | |
|
1548 | 1550 | |
|
1549 | 1551 | @pytest.fixture |
|
1550 | 1552 | def enabled_backends(request): |
|
1551 | 1553 | backends = request.config.option.backends |
|
1552 | 1554 | return backends[:] |
|
1553 | 1555 | |
|
1554 | 1556 | |
|
1555 | 1557 | @pytest.fixture |
|
1556 | 1558 | def settings_util(request, db_connection): |
|
1557 | 1559 | """ |
|
1558 | 1560 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1559 | 1561 | """ |
|
1560 | 1562 | utility = SettingsUtility() |
|
1561 | 1563 | request.addfinalizer(utility.cleanup) |
|
1562 | 1564 | return utility |
|
1563 | 1565 | |
|
1564 | 1566 | |
|
1565 | 1567 | class SettingsUtility(object): |
|
1566 | 1568 | def __init__(self): |
|
1567 | 1569 | self.rhodecode_ui_ids = [] |
|
1568 | 1570 | self.rhodecode_setting_ids = [] |
|
1569 | 1571 | self.repo_rhodecode_ui_ids = [] |
|
1570 | 1572 | self.repo_rhodecode_setting_ids = [] |
|
1571 | 1573 | |
|
1572 | 1574 | def create_repo_rhodecode_ui( |
|
1573 | 1575 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1574 | 1576 | key = key or hashlib.sha1( |
|
1575 | 1577 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1576 | 1578 | |
|
1577 | 1579 | setting = RepoRhodeCodeUi() |
|
1578 | 1580 | setting.repository_id = repo.repo_id |
|
1579 | 1581 | setting.ui_section = section |
|
1580 | 1582 | setting.ui_value = value |
|
1581 | 1583 | setting.ui_key = key |
|
1582 | 1584 | setting.ui_active = active |
|
1583 | 1585 | Session().add(setting) |
|
1584 | 1586 | Session().commit() |
|
1585 | 1587 | |
|
1586 | 1588 | if cleanup: |
|
1587 | 1589 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1588 | 1590 | return setting |
|
1589 | 1591 | |
|
1590 | 1592 | def create_rhodecode_ui( |
|
1591 | 1593 | self, section, value, key=None, active=True, cleanup=True): |
|
1592 | 1594 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1593 | 1595 | |
|
1594 | 1596 | setting = RhodeCodeUi() |
|
1595 | 1597 | setting.ui_section = section |
|
1596 | 1598 | setting.ui_value = value |
|
1597 | 1599 | setting.ui_key = key |
|
1598 | 1600 | setting.ui_active = active |
|
1599 | 1601 | Session().add(setting) |
|
1600 | 1602 | Session().commit() |
|
1601 | 1603 | |
|
1602 | 1604 | if cleanup: |
|
1603 | 1605 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1604 | 1606 | return setting |
|
1605 | 1607 | |
|
1606 | 1608 | def create_repo_rhodecode_setting( |
|
1607 | 1609 | self, repo, name, value, type_, cleanup=True): |
|
1608 | 1610 | setting = RepoRhodeCodeSetting( |
|
1609 | 1611 | repo.repo_id, key=name, val=value, type=type_) |
|
1610 | 1612 | Session().add(setting) |
|
1611 | 1613 | Session().commit() |
|
1612 | 1614 | |
|
1613 | 1615 | if cleanup: |
|
1614 | 1616 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1615 | 1617 | return setting |
|
1616 | 1618 | |
|
1617 | 1619 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1618 | 1620 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1619 | 1621 | Session().add(setting) |
|
1620 | 1622 | Session().commit() |
|
1621 | 1623 | |
|
1622 | 1624 | if cleanup: |
|
1623 | 1625 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1624 | 1626 | |
|
1625 | 1627 | return setting |
|
1626 | 1628 | |
|
1627 | 1629 | def cleanup(self): |
|
1628 | 1630 | for id_ in self.rhodecode_ui_ids: |
|
1629 | 1631 | setting = RhodeCodeUi.get(id_) |
|
1630 | 1632 | Session().delete(setting) |
|
1631 | 1633 | |
|
1632 | 1634 | for id_ in self.rhodecode_setting_ids: |
|
1633 | 1635 | setting = RhodeCodeSetting.get(id_) |
|
1634 | 1636 | Session().delete(setting) |
|
1635 | 1637 | |
|
1636 | 1638 | for id_ in self.repo_rhodecode_ui_ids: |
|
1637 | 1639 | setting = RepoRhodeCodeUi.get(id_) |
|
1638 | 1640 | Session().delete(setting) |
|
1639 | 1641 | |
|
1640 | 1642 | for id_ in self.repo_rhodecode_setting_ids: |
|
1641 | 1643 | setting = RepoRhodeCodeSetting.get(id_) |
|
1642 | 1644 | Session().delete(setting) |
|
1643 | 1645 | |
|
1644 | 1646 | Session().commit() |
|
1645 | 1647 | |
|
1646 | 1648 | |
|
1647 | 1649 | @pytest.fixture |
|
1648 | 1650 | def no_notifications(request): |
|
1649 | 1651 | notification_patcher = mock.patch( |
|
1650 | 1652 | 'rhodecode.model.notification.NotificationModel.create') |
|
1651 | 1653 | notification_patcher.start() |
|
1652 | 1654 | request.addfinalizer(notification_patcher.stop) |
|
1653 | 1655 | |
|
1654 | 1656 | |
|
1655 | 1657 | @pytest.fixture(scope='session') |
|
1656 | 1658 | def repeat(request): |
|
1657 | 1659 | """ |
|
1658 | 1660 | The number of repetitions is based on this fixture. |
|
1659 | 1661 | |
|
1660 | 1662 | Slower calls may divide it by 10 or 100. It is chosen in a way so that the |
|
1661 | 1663 | tests are not too slow in our default test suite. |
|
1662 | 1664 | """ |
|
1663 | 1665 | return request.config.getoption('--repeat') |
|
1664 | 1666 | |
|
1665 | 1667 | |
|
1666 | 1668 | @pytest.fixture |
|
1667 | 1669 | def rhodecode_fixtures(): |
|
1668 | 1670 | return Fixture() |
|
1669 | 1671 | |
|
1670 | 1672 | |
|
1671 | 1673 | @pytest.fixture |
|
1672 | 1674 | def context_stub(): |
|
1673 | 1675 | """ |
|
1674 | 1676 | Stub context object. |
|
1675 | 1677 | """ |
|
1676 | 1678 | context = pyramid.testing.DummyResource() |
|
1677 | 1679 | return context |
|
1678 | 1680 | |
|
1679 | 1681 | |
|
1680 | 1682 | @pytest.fixture |
|
1681 | 1683 | def request_stub(): |
|
1682 | 1684 | """ |
|
1683 | 1685 | Stub request object. |
|
1684 | 1686 | """ |
|
1685 | 1687 | from rhodecode.lib.base import bootstrap_request |
|
1686 | 1688 | request = bootstrap_request(scheme='https') |
|
1687 | 1689 | return request |
|
1688 | 1690 | |
|
1689 | 1691 | |
|
1690 | 1692 | @pytest.fixture |
|
1691 | 1693 | def config_stub(request, request_stub): |
|
1692 | 1694 | """ |
|
1693 | 1695 | Set up pyramid.testing and return the Configurator. |
|
1694 | 1696 | """ |
|
1695 | 1697 | from rhodecode.lib.base import bootstrap_config |
|
1696 | 1698 | config = bootstrap_config(request=request_stub) |
|
1697 | 1699 | |
|
1698 | 1700 | @request.addfinalizer |
|
1699 | 1701 | def cleanup(): |
|
1700 | 1702 | pyramid.testing.tearDown() |
|
1701 | 1703 | |
|
1702 | 1704 | return config |
|
1703 | 1705 | |
|
1704 | 1706 | |
|
1705 | 1707 | @pytest.fixture |
|
1706 | 1708 | def StubIntegrationType(): |
|
1707 | 1709 | class _StubIntegrationType(IntegrationTypeBase): |
|
1708 | 1710 | """ Test integration type class """ |
|
1709 | 1711 | |
|
1710 | 1712 | key = 'test' |
|
1711 | 1713 | display_name = 'Test integration type' |
|
1712 | 1714 | description = 'A test integration type for testing' |
|
1713 | 1715 | |
|
1714 | 1716 | @classmethod |
|
1715 | 1717 | def icon(cls): |
|
1716 | 1718 | return 'test_icon_html_image' |
|
1717 | 1719 | |
|
1718 | 1720 | def __init__(self, settings): |
|
1719 | 1721 | super(_StubIntegrationType, self).__init__(settings) |
|
1720 | 1722 | self.sent_events = [] # for testing |
|
1721 | 1723 | |
|
1722 | 1724 | def send_event(self, event): |
|
1723 | 1725 | self.sent_events.append(event) |
|
1724 | 1726 | |
|
1725 | 1727 | def settings_schema(self): |
|
1726 | 1728 | class SettingsSchema(colander.Schema): |
|
1727 | 1729 | test_string_field = colander.SchemaNode( |
|
1728 | 1730 | colander.String(), |
|
1729 | 1731 | missing=colander.required, |
|
1730 | 1732 | title='test string field', |
|
1731 | 1733 | ) |
|
1732 | 1734 | test_int_field = colander.SchemaNode( |
|
1733 | 1735 | colander.Int(), |
|
1734 | 1736 | title='some integer setting', |
|
1735 | 1737 | ) |
|
1736 | 1738 | return SettingsSchema() |
|
1737 | 1739 | |
|
1738 | 1740 | |
|
1739 | 1741 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1740 | 1742 | return _StubIntegrationType |
|
1741 | 1743 | |
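A hypothetical test using the stub together with the settings fixture below; colander validates the mapping and coerces the `Int` node:

    def test_schema_roundtrip(StubIntegrationType, stub_integration_settings):
        schema = StubIntegrationType({}).settings_schema()
        cleaned = schema.deserialize(stub_integration_settings)
        # omitting test_string_field would raise colander.Invalid
        assert cleaned == {'test_string_field': 'some data',
                           'test_int_field': 100}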
|
1742 | 1744 | @pytest.fixture |
|
1743 | 1745 | def stub_integration_settings(): |
|
1744 | 1746 | return { |
|
1745 | 1747 | 'test_string_field': 'some data', |
|
1746 | 1748 | 'test_int_field': 100, |
|
1747 | 1749 | } |
|
1748 | 1750 | |
|
1749 | 1751 | |
|
1750 | 1752 | @pytest.fixture |
|
1751 | 1753 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1752 | 1754 | stub_integration_settings): |
|
1753 | 1755 | integration = IntegrationModel().create( |
|
1754 | 1756 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1755 | 1757 | name='test repo integration', |
|
1756 | 1758 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1757 | 1759 | |
|
1758 | 1760 | @request.addfinalizer |
|
1759 | 1761 | def cleanup(): |
|
1760 | 1762 | IntegrationModel().delete(integration) |
|
1761 | 1763 | |
|
1762 | 1764 | return integration |
|
1763 | 1765 | |
|
1764 | 1766 | |
|
1765 | 1767 | @pytest.fixture |
|
1766 | 1768 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1767 | 1769 | stub_integration_settings): |
|
1768 | 1770 | integration = IntegrationModel().create( |
|
1769 | 1771 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1770 | 1772 | name='test repogroup integration', |
|
1771 | 1773 | repo=None, repo_group=test_repo_group, child_repos_only=True) |
|
1772 | 1774 | |
|
1773 | 1775 | @request.addfinalizer |
|
1774 | 1776 | def cleanup(): |
|
1775 | 1777 | IntegrationModel().delete(integration) |
|
1776 | 1778 | |
|
1777 | 1779 | return integration |
|
1778 | 1780 | |
|
1779 | 1781 | |
|
1780 | 1782 | @pytest.fixture |
|
1781 | 1783 | def repogroup_recursive_integration_stub(request, test_repo_group, |
|
1782 | 1784 | StubIntegrationType, stub_integration_settings): |
|
1783 | 1785 | integration = IntegrationModel().create( |
|
1784 | 1786 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1785 | 1787 | name='test recursive repogroup integration', |
|
1786 | 1788 | repo=None, repo_group=test_repo_group, child_repos_only=False) |
|
1787 | 1789 | |
|
1788 | 1790 | @request.addfinalizer |
|
1789 | 1791 | def cleanup(): |
|
1790 | 1792 | IntegrationModel().delete(integration) |
|
1791 | 1793 | |
|
1792 | 1794 | return integration |
|
1793 | 1795 | |
|
1794 | 1796 | |
|
1795 | 1797 | @pytest.fixture |
|
1796 | 1798 | def global_integration_stub(request, StubIntegrationType, |
|
1797 | 1799 | stub_integration_settings): |
|
1798 | 1800 | integration = IntegrationModel().create( |
|
1799 | 1801 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1800 | 1802 | name='test global integration', |
|
1801 | 1803 | repo=None, repo_group=None, child_repos_only=None) |
|
1802 | 1804 | |
|
1803 | 1805 | @request.addfinalizer |
|
1804 | 1806 | def cleanup(): |
|
1805 | 1807 | IntegrationModel().delete(integration) |
|
1806 | 1808 | |
|
1807 | 1809 | return integration |
|
1808 | 1810 | |
|
1809 | 1811 | |
|
1810 | 1812 | @pytest.fixture |
|
1811 | 1813 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1812 | 1814 | stub_integration_settings): |
|
1813 | 1815 | integration = IntegrationModel().create( |
|
1814 | 1816 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1815 | 1817 | name='test global integration', |
|
1816 | 1818 | repo=None, repo_group=None, child_repos_only=True) |
|
1817 | 1819 | |
|
1818 | 1820 | @request.addfinalizer |
|
1819 | 1821 | def cleanup(): |
|
1820 | 1822 | IntegrationModel().delete(integration) |
|
1821 | 1823 | |
|
1822 | 1824 | return integration |
|
1823 | 1825 | |
|
1824 | 1826 | |
|
1825 | 1827 | @pytest.fixture |
|
1826 | 1828 | def local_dt_to_utc(): |
|
1827 | 1829 | def _factory(dt): |
|
1828 | 1830 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( |
|
1829 | 1831 | dateutil.tz.tzutc()).replace(tzinfo=None) |
|
1830 | 1832 | return _factory |
|
1831 | 1833 | |
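A hypothetical usage sketch: the factory takes a naive local datetime and returns the equivalent naive UTC datetime (the example comment assumes a UTC+2 local zone):

    def test_conversion(local_dt_to_utc):
        local = datetime.datetime(2018, 6, 1, 12, 0, 0)
        utc = local_dt_to_utc(local)
        assert utc.tzinfo is None  # still naive; 10:00 under UTC+2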
|
1832 | 1834 | |
|
1833 | 1835 | @pytest.fixture |
|
1834 | 1836 | def disable_anonymous_user(request, baseapp): |
|
1835 | 1837 | set_anonymous_access(False) |
|
1836 | 1838 | |
|
1837 | 1839 | @request.addfinalizer |
|
1838 | 1840 | def cleanup(): |
|
1839 | 1841 | set_anonymous_access(True) |
|
1840 | 1842 | |
|
1841 | 1843 | |
|
1842 | 1844 | @pytest.fixture(scope='module') |
|
1843 | 1845 | def rc_fixture(request): |
|
1844 | 1846 | return Fixture() |
|
1845 | 1847 | |
|
1846 | 1848 | |
|
1847 | 1849 | @pytest.fixture |
|
1848 | 1850 | def repo_groups(request): |
|
1849 | 1851 | fixture = Fixture() |
|
1850 | 1852 | |
|
1851 | 1853 | session = Session() |
|
1852 | 1854 | zombie_group = fixture.create_repo_group('zombie') |
|
1853 | 1855 | parent_group = fixture.create_repo_group('parent') |
|
1854 | 1856 | child_group = fixture.create_repo_group('parent/child') |
|
1855 | 1857 | groups_in_db = session.query(RepoGroup).all() |
|
1856 | 1858 | assert len(groups_in_db) == 3 |
|
1857 | 1859 | assert child_group.group_parent_id == parent_group.group_id |
|
1858 | 1860 | |
|
1859 | 1861 | @request.addfinalizer |
|
1860 | 1862 | def cleanup(): |
|
1861 | 1863 | fixture.destroy_repo_group(zombie_group) |
|
1862 | 1864 | fixture.destroy_repo_group(child_group) |
|
1863 | 1865 | fixture.destroy_repo_group(parent_group) |
|
1864 | 1866 | |
|
1865 | 1867 | return zombie_group, parent_group, child_group |
@@ -1,1289 +1,1288 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import datetime |
|
22 | 22 | import mock |
|
23 | 23 | import os |
|
24 | 24 | import sys |
|
25 | 25 | import shutil |
|
26 | 26 | |
|
27 | 27 | import pytest |
|
28 | 28 | |
|
29 | 29 | from rhodecode.lib.utils import make_db_config |
|
30 | 30 | from rhodecode.lib.vcs.backends.base import Reference |
|
31 | 31 | from rhodecode.lib.vcs.backends.git import ( |
|
32 | 32 | GitRepository, GitCommit, discover_git_version) |
|
33 | 33 | from rhodecode.lib.vcs.exceptions import ( |
|
34 | 34 | RepositoryError, VCSError, NodeDoesNotExistError) |
|
35 | 35 | from rhodecode.lib.vcs.nodes import ( |
|
36 | 36 | NodeKind, FileNode, DirNode, NodeState, SubModuleNode) |
|
37 | 37 | from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir |
|
38 | 38 | from rhodecode.tests.vcs.conftest import BackendTestMixin |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | pytestmark = pytest.mark.backends("git") |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | def repo_path_generator(): |
|
45 | 45 | """ |
|
46 | 46 | Return a different path to be used for cloning repos. |
|
47 | 47 | """ |
|
48 | 48 | i = 0 |
|
49 | 49 | while True: |
|
50 | 50 | i += 1 |
|
51 | 51 | yield '%s-%d' % (TEST_GIT_REPO_CLONE, i) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | REPO_PATH_GENERATOR = repo_path_generator() |
|
55 | 55 | |
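Each `next()` call hands out a fresh clone target, e.g. `<TEST_GIT_REPO_CLONE>-1`, then `-2`, and so on:

    first = next(REPO_PATH_GENERATOR)   # '<TEST_GIT_REPO_CLONE>-1'
    second = next(REPO_PATH_GENERATOR)  # '<TEST_GIT_REPO_CLONE>-2'
    assert first != second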
|
56 | 56 | |
|
57 | 57 | class TestGitRepository: |
|
58 | 58 | |
|
59 | 59 | # pylint: disable=protected-access |
|
60 | 60 | |
|
61 | 61 | def __check_for_existing_repo(self): |
|
62 | 62 | if os.path.exists(TEST_GIT_REPO_CLONE): |
|
63 | 63 | self.fail('Cannot test git clone repo as location %s already ' |
|
64 | 64 | 'exists. You should manually remove it first.' |
|
65 | 65 | % TEST_GIT_REPO_CLONE) |
|
66 | 66 | |
|
67 | 67 | @pytest.fixture(autouse=True) |
|
68 | 68 | def prepare(self, request, baseapp): |
|
69 | 69 | self.repo = GitRepository(TEST_GIT_REPO, bare=True) |
|
70 | 70 | |
|
71 | 71 | def get_clone_repo(self): |
|
72 | 72 | """ |
|
73 | 73 | Return a non-bare clone of the base repo. |
|
74 | 74 | """ |
|
75 | 75 | clone_path = next(REPO_PATH_GENERATOR) |
|
76 | 76 | repo_clone = GitRepository( |
|
77 | 77 | clone_path, create=True, src_url=self.repo.path, bare=False) |
|
78 | 78 | |
|
79 | 79 | return repo_clone |
|
80 | 80 | |
|
81 | 81 | def get_empty_repo(self, bare=False): |
|
82 | 82 | """ |
|
83 | 83 | Return a non bare empty repo. |
|
84 | 84 | Return an empty repo, non-bare by default. |
|
85 | 85 | return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare) |
|
86 | 86 | |
|
87 | 87 | def test_wrong_repo_path(self): |
|
88 | 88 | wrong_repo_path = '/tmp/errorrepo_git' |
|
89 | 89 | with pytest.raises(RepositoryError): |
|
90 | 90 | GitRepository(wrong_repo_path) |
|
91 | 91 | |
|
92 | 92 | def test_repo_clone(self): |
|
93 | 93 | self.__check_for_existing_repo() |
|
94 | 94 | repo = GitRepository(TEST_GIT_REPO) |
|
95 | 95 | repo_clone = GitRepository( |
|
96 | 96 | TEST_GIT_REPO_CLONE, |
|
97 | src_url=TEST_GIT_REPO, create=True, | |

97 | src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True) | |
|
98 | 98 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
99 | 99 | # Checking hashes of commits should be enough |
|
100 | 100 | for commit in repo.get_commits(): |
|
101 | 101 | raw_id = commit.raw_id |
|
102 | 102 | assert raw_id == repo_clone.get_commit(raw_id).raw_id |
|
103 | 103 | |
|
104 | 104 | def test_repo_clone_without_create(self): |
|
105 | 105 | with pytest.raises(RepositoryError): |
|
106 | 106 | GitRepository( |
|
107 | 107 | TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO) |
|
108 | 108 | |
|
109 | 109 | def test_repo_clone_with_update(self): |
|
110 | 110 | repo = GitRepository(TEST_GIT_REPO) |
|
111 | 111 | clone_path = TEST_GIT_REPO_CLONE + '_with_update' |
|
112 | 112 | repo_clone = GitRepository( |
|
113 | 113 | clone_path, |
|
114 | create=True, src_url=TEST_GIT_REPO, | |

114 | create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True) | |
|
115 | 115 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
116 | 116 | |
|
117 | 117 | # check if current workdir was updated |
|
118 | 118 | fpath = os.path.join(clone_path, 'MANIFEST.in') |
|
119 | 119 | assert os.path.isfile(fpath) |
|
120 | 120 | |
|
121 | 121 | def test_repo_clone_without_update(self): |
|
122 | 122 | repo = GitRepository(TEST_GIT_REPO) |
|
123 | 123 | clone_path = TEST_GIT_REPO_CLONE + '_without_update' |
|
124 | 124 | repo_clone = GitRepository( |
|
125 | 125 | clone_path, |
|
126 | create=True, src_url=TEST_GIT_REPO, | |

126 | create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False) | |
|
127 | 127 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
128 | 128 | # check if current workdir was *NOT* updated |
|
129 | 129 | fpath = os.path.join(clone_path, 'MANIFEST.in') |
|
130 | 130 | # Make sure it's not bare repo |
|
131 | 131 | assert not repo_clone.bare |
|
132 | 132 | assert not os.path.isfile(fpath) |
|
133 | 133 | |
|
134 | 134 | def test_repo_clone_into_bare_repo(self): |
|
135 | 135 | repo = GitRepository(TEST_GIT_REPO) |
|
136 | 136 | clone_path = TEST_GIT_REPO_CLONE + '_bare.git' |
|
137 | 137 | repo_clone = GitRepository( |
|
138 | 138 | clone_path, create=True, src_url=repo.path, bare=True) |
|
139 | 139 | assert repo_clone.bare |
|
140 | 140 | |
|
141 | 141 | def test_create_repo_is_not_bare_by_default(self): |
|
142 | 142 | repo = GitRepository(get_new_dir('not-bare-by-default'), create=True) |
|
143 | 143 | assert not repo.bare |
|
144 | 144 | |
|
145 | 145 | def test_create_bare_repo(self): |
|
146 | 146 | repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True) |
|
147 | 147 | assert repo.bare |
|
148 | 148 | |
|
149 | 149 | def test_update_server_info(self): |
|
150 | 150 | self.repo._update_server_info() |
|
151 | 151 | |
|
152 | 152 | def test_fetch(self, vcsbackend_git): |
|
153 | 153 | # Note: This is a git specific part of the API, it's only implemented |
|
154 | 154 | # by the git backend. |
|
155 | 155 | source_repo = vcsbackend_git.repo |
|
156 | target_repo = vcsbackend_git.create_repo() | |
|
156 | target_repo = vcsbackend_git.create_repo(bare=True) | |
|
157 | 157 | target_repo.fetch(source_repo.path) |
|
158 | 158 | # Note: Get a fresh instance, avoids caching trouble |
|
159 | 159 | target_repo = vcsbackend_git.backend(target_repo.path) |
|
160 | 160 | assert len(source_repo.commit_ids) == len(target_repo.commit_ids) |
|
161 | 161 | |
|
162 | 162 | def test_commit_ids(self): |
|
163 | 163 | # there are 112 commits (by now) |
|
164 | 164 | # so we can assume they will be available from now on |
|
165 | subset = set([ | |
|
166 | 'c1214f7e79e02fc37156ff215cd71275450cffc3', | |
|
165 | subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3', | |
|
167 | 166 | '38b5fe81f109cb111f549bfe9bb6b267e10bc557', |
|
168 | 167 | 'fa6600f6848800641328adbf7811fd2372c02ab2', |
|
169 | 168 | '102607b09cdd60e2793929c4f90478be29f85a17', |
|
170 | 169 | '49d3fd156b6f7db46313fac355dca1a0b94a0017', |
|
171 | 170 | '2d1028c054665b962fa3d307adfc923ddd528038', |
|
172 | 171 | 'd7e0d30fbcae12c90680eb095a4f5f02505ce501', |
|
173 | 172 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
174 | 173 | 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', |
|
175 | 174 | '8430a588b43b5d6da365400117c89400326e7992', |
|
176 | 175 | 'd955cd312c17b02143c04fa1099a352b04368118', |
|
177 | 176 | 'f67b87e5c629c2ee0ba58f85197e423ff28d735b', |
|
178 | 177 | 'add63e382e4aabc9e1afdc4bdc24506c269b7618', |
|
179 | 178 | 'f298fe1189f1b69779a4423f40b48edf92a703fc', |
|
180 | 179 | 'bd9b619eb41994cac43d67cf4ccc8399c1125808', |
|
181 | 180 | '6e125e7c890379446e98980d8ed60fba87d0f6d1', |
|
182 | 181 | 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd', |
|
183 | 182 | '0b05e4ed56c802098dfc813cbe779b2f49e92500', |
|
184 | 183 | '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
185 | 184 | '45223f8f114c64bf4d6f853e3c35a369a6305520', |
|
186 | 185 | 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', |
|
187 | 186 | 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68', |
|
188 | 187 | '27d48942240f5b91dfda77accd2caac94708cc7d', |
|
189 | 188 | '622f0eb0bafd619d2560c26f80f09e3b0b0d78af', |
|
190 | 'e686b958768ee96af8029fe19c6050b1a8dd3b2b' | |

189 | 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'} | |
|
191 | 190 | assert subset.issubset(set(self.repo.commit_ids)) |
|
192 | 191 | |
|
193 | 192 | def test_slicing(self): |
|
194 | 193 | # 4 1 5 10 95 |
|
195 | 194 | for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), |
|
196 | 195 | (10, 20, 10), (5, 100, 95)]: |
|
197 | 196 | commit_ids = list(self.repo[sfrom:sto]) |
|
198 | 197 | assert len(commit_ids) == size |
|
199 | 198 | assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom) |
|
200 | 199 | assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1) |
|
201 | 200 | |
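One parametrization spelled out as a hypothetical standalone snippet; slicing a repository yields commit objects in index order:

    repo = GitRepository(TEST_GIT_REPO)
    commits = list(repo[0:4])
    assert len(commits) == 4
    assert commits[0] == repo.get_commit(commit_idx=0)
    assert commits[-1] == repo.get_commit(commit_idx=3)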
|
202 | 201 | def test_branches(self): |
|
203 | 202 | # TODO: Need more tests here |
|
204 | 203 | # Removed (those are 'remotes' branches for cloned repo) |
|
205 | 204 | # assert 'master' in self.repo.branches |
|
206 | 205 | # assert 'gittree' in self.repo.branches |
|
207 | 206 | # assert 'web-branch' in self.repo.branches |
|
208 | 207 | for __, commit_id in self.repo.branches.items(): |
|
209 | 208 | assert isinstance(self.repo.get_commit(commit_id), GitCommit) |
|
210 | 209 | |
|
211 | 210 | def test_tags(self): |
|
212 | 211 | # TODO: Need more tests here |
|
213 | 212 | assert 'v0.1.1' in self.repo.tags |
|
214 | 213 | assert 'v0.1.2' in self.repo.tags |
|
215 | 214 | for __, commit_id in self.repo.tags.items(): |
|
216 | 215 | assert isinstance(self.repo.get_commit(commit_id), GitCommit) |
|
217 | 216 | |
|
218 | 217 | def _test_single_commit_cache(self, commit_id): |
|
219 | 218 | commit = self.repo.get_commit(commit_id) |
|
220 | 219 | assert commit_id in self.repo.commits |
|
221 | 220 | assert commit is self.repo.commits[commit_id] |
|
222 | 221 | |
|
223 | 222 | def test_initial_commit(self): |
|
224 | 223 | commit_id = self.repo.commit_ids[0] |
|
225 | 224 | init_commit = self.repo.get_commit(commit_id) |
|
226 | 225 | init_author = init_commit.author |
|
227 | 226 | |
|
228 | 227 | assert init_commit.message == 'initial import\n' |
|
229 | 228 | assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>' |
|
230 | 229 | assert init_author == init_commit.committer |
|
231 | 230 | for path in ('vcs/__init__.py', |
|
232 | 231 | 'vcs/backends/BaseRepository.py', |
|
233 | 232 | 'vcs/backends/__init__.py'): |
|
234 | 233 | assert isinstance(init_commit.get_node(path), FileNode) |
|
235 | 234 | for path in ('', 'vcs', 'vcs/backends'): |
|
236 | 235 | assert isinstance(init_commit.get_node(path), DirNode) |
|
237 | 236 | |
|
238 | 237 | with pytest.raises(NodeDoesNotExistError): |
|
239 | 238 | init_commit.get_node(path='foobar') |
|
240 | 239 | |
|
241 | 240 | node = init_commit.get_node('vcs/') |
|
242 | 241 | assert hasattr(node, 'kind') |
|
243 | 242 | assert node.kind == NodeKind.DIR |
|
244 | 243 | |
|
245 | 244 | node = init_commit.get_node('vcs') |
|
246 | 245 | assert hasattr(node, 'kind') |
|
247 | 246 | assert node.kind == NodeKind.DIR |
|
248 | 247 | |
|
249 | 248 | node = init_commit.get_node('vcs/__init__.py') |
|
250 | 249 | assert hasattr(node, 'kind') |
|
251 | 250 | assert node.kind == NodeKind.FILE |
|
252 | 251 | |
|
253 | 252 | def test_not_existing_commit(self): |
|
254 | 253 | with pytest.raises(RepositoryError): |
|
255 | 254 | self.repo.get_commit('f' * 40) |
|
256 | 255 | |
|
257 | 256 | def test_commit10(self): |
|
258 | 257 | |
|
259 | 258 | commit10 = self.repo.get_commit(self.repo.commit_ids[9]) |
|
260 | 259 | README = """=== |
|
261 | 260 | VCS |
|
262 | 261 | === |
|
263 | 262 | |
|
264 | 263 | Various Version Control System management abstraction layer for Python. |
|
265 | 264 | |
|
266 | 265 | Introduction |
|
267 | 266 | ------------ |
|
268 | 267 | |
|
269 | 268 | TODO: To be written... |
|
270 | 269 | |
|
271 | 270 | """ |
|
272 | 271 | node = commit10.get_node('README.rst') |
|
273 | 272 | assert node.kind == NodeKind.FILE |
|
274 | 273 | assert node.content == README |
|
275 | 274 | |
|
276 | 275 | def test_head(self): |
|
277 | 276 | assert self.repo.head == self.repo.get_commit().raw_id |
|
278 | 277 | |
|
279 | 278 | def test_checkout_with_create(self): |
|
280 | 279 | repo_clone = self.get_clone_repo() |
|
281 | 280 | |
|
282 | 281 | new_branch = 'new_branch' |
|
283 | 282 | assert repo_clone._current_branch() == 'master' |
|
284 | assert set(repo_clone.branches) == | |

283 | assert set(repo_clone.branches) == {'master'} | |
|
285 | 284 | repo_clone._checkout(new_branch, create=True) |
|
286 | 285 | |
|
287 | 286 | # Branches is a lazy property so we need to recreate the Repo object. |
|
288 | 287 | repo_clone = GitRepository(repo_clone.path) |
|
289 | assert set(repo_clone.branches) == | |

288 | assert set(repo_clone.branches) == {'master', new_branch} | |
|
290 | 289 | assert repo_clone._current_branch() == new_branch |
|
291 | 290 | |
|
292 | 291 | def test_checkout(self): |
|
293 | 292 | repo_clone = self.get_clone_repo() |
|
294 | 293 | |
|
295 | 294 | repo_clone._checkout('new_branch', create=True) |
|
296 | 295 | repo_clone._checkout('master') |
|
297 | 296 | |
|
298 | 297 | assert repo_clone._current_branch() == 'master' |
|
299 | 298 | |
|
300 | 299 | def test_checkout_same_branch(self): |
|
301 | 300 | repo_clone = self.get_clone_repo() |
|
302 | 301 | |
|
303 | 302 | repo_clone._checkout('master') |
|
304 | 303 | assert repo_clone._current_branch() == 'master' |
|
305 | 304 | |
|
306 | 305 | def test_checkout_branch_already_exists(self): |
|
307 | 306 | repo_clone = self.get_clone_repo() |
|
308 | 307 | |
|
309 | 308 | with pytest.raises(RepositoryError): |
|
310 | 309 | repo_clone._checkout('master', create=True) |
|
311 | 310 | |
|
312 | 311 | def test_checkout_bare_repo(self): |
|
313 | 312 | with pytest.raises(RepositoryError): |
|
314 | 313 | self.repo._checkout('master') |
|
315 | 314 | |
|
316 | 315 | def test_current_branch_bare_repo(self): |
|
317 | 316 | with pytest.raises(RepositoryError): |
|
318 | 317 | self.repo._current_branch() |
|
319 | 318 | |
|
320 | 319 | def test_current_branch_empty_repo(self): |
|
321 | 320 | repo = self.get_empty_repo() |
|
322 | 321 | assert repo._current_branch() is None |
|
323 | 322 | |
|
324 | 323 | def test_local_clone(self): |
|
325 | 324 | clone_path = next(REPO_PATH_GENERATOR) |
|
326 | 325 | self.repo._local_clone(clone_path, 'master') |
|
327 | 326 | repo_clone = GitRepository(clone_path) |
|
328 | 327 | |
|
329 | 328 | assert self.repo.commit_ids == repo_clone.commit_ids |
|
330 | 329 | |
|
331 | 330 | def test_local_clone_with_specific_branch(self): |
|
332 | 331 | source_repo = self.get_clone_repo() |
|
333 | 332 | |
|
334 | 333 | # Create a new branch in source repo |
|
335 | 334 | new_branch_commit = source_repo.commit_ids[-3] |
|
336 | 335 | source_repo._checkout(new_branch_commit) |
|
337 | 336 | source_repo._checkout('new_branch', create=True) |
|
338 | 337 | |
|
339 | 338 | clone_path = next(REPO_PATH_GENERATOR) |
|
340 | 339 | source_repo._local_clone(clone_path, 'new_branch') |
|
341 | 340 | repo_clone = GitRepository(clone_path) |
|
342 | 341 | |
|
343 | 342 | assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids |
|
344 | 343 | |
|
345 | 344 | clone_path = next(REPO_PATH_GENERATOR) |
|
346 | 345 | source_repo._local_clone(clone_path, 'master') |
|
347 | 346 | repo_clone = GitRepository(clone_path) |
|
348 | 347 | |
|
349 | 348 | assert source_repo.commit_ids == repo_clone.commit_ids |
|
350 | 349 | |
|
351 | 350 | def test_local_clone_fails_if_target_exists(self): |
|
352 | 351 | with pytest.raises(RepositoryError): |
|
353 | 352 | self.repo._local_clone(self.repo.path, 'master') |
|
354 | 353 | |
|
355 | 354 | def test_local_fetch(self): |
|
356 | 355 | target_repo = self.get_empty_repo() |
|
357 | 356 | source_repo = self.get_clone_repo() |
|
358 | 357 | |
|
359 | 358 | # Create a new branch in source repo |
|
360 | 359 | master_commit = source_repo.commit_ids[-1] |
|
361 | 360 | new_branch_commit = source_repo.commit_ids[-3] |
|
362 | 361 | source_repo._checkout(new_branch_commit) |
|
363 | 362 | source_repo._checkout('new_branch', create=True) |
|
364 | 363 | |
|
365 | 364 | target_repo._local_fetch(source_repo.path, 'new_branch') |
|
366 | 365 | assert target_repo._last_fetch_heads() == [new_branch_commit] |
|
367 | 366 | |
|
368 | 367 | target_repo._local_fetch(source_repo.path, 'master') |
|
369 | 368 | assert target_repo._last_fetch_heads() == [master_commit] |
|
370 | 369 | |
|
371 | 370 | def test_local_fetch_from_bare_repo(self): |
|
372 | 371 | target_repo = self.get_empty_repo() |
|
373 | 372 | target_repo._local_fetch(self.repo.path, 'master') |
|
374 | 373 | |
|
375 | 374 | master_commit = self.repo.commit_ids[-1] |
|
376 | 375 | assert target_repo._last_fetch_heads() == [master_commit] |
|
377 | 376 | |
|
378 | 377 | def test_local_fetch_from_same_repo(self): |
|
379 | 378 | with pytest.raises(ValueError): |
|
380 | 379 | self.repo._local_fetch(self.repo.path, 'master') |
|
381 | 380 | |
|
382 | 381 | def test_local_fetch_branch_does_not_exist(self): |
|
383 | 382 | target_repo = self.get_empty_repo() |
|
384 | 383 | |
|
385 | 384 | with pytest.raises(RepositoryError): |
|
386 | 385 | target_repo._local_fetch(self.repo.path, 'new_branch') |
|
387 | 386 | |
|
388 | 387 | def test_local_pull(self): |
|
389 | 388 | target_repo = self.get_empty_repo() |
|
390 | 389 | source_repo = self.get_clone_repo() |
|
391 | 390 | |
|
392 | 391 | # Create a new branch in source repo |
|
393 | 392 | master_commit = source_repo.commit_ids[-1] |
|
394 | 393 | new_branch_commit = source_repo.commit_ids[-3] |
|
395 | 394 | source_repo._checkout(new_branch_commit) |
|
396 | 395 | source_repo._checkout('new_branch', create=True) |
|
397 | 396 | |
|
398 | 397 | target_repo._local_pull(source_repo.path, 'new_branch') |
|
399 | 398 | target_repo = GitRepository(target_repo.path) |
|
400 | 399 | assert target_repo.head == new_branch_commit |
|
401 | 400 | |
|
402 | 401 | target_repo._local_pull(source_repo.path, 'master') |
|
403 | 402 | target_repo = GitRepository(target_repo.path) |
|
404 | 403 | assert target_repo.head == master_commit |
|
405 | 404 | |
|
406 | 405 | def test_local_pull_in_bare_repo(self): |
|
407 | 406 | with pytest.raises(RepositoryError): |
|
408 | 407 | self.repo._local_pull(self.repo.path, 'master') |
|
409 | 408 | |
|
410 | 409 | def test_local_merge(self): |
|
411 | 410 | target_repo = self.get_empty_repo() |
|
412 | 411 | source_repo = self.get_clone_repo() |
|
413 | 412 | |
|
414 | 413 | # Create a new branch in source repo |
|
415 | 414 | master_commit = source_repo.commit_ids[-1] |
|
416 | 415 | new_branch_commit = source_repo.commit_ids[-3] |
|
417 | 416 | source_repo._checkout(new_branch_commit) |
|
418 | 417 | source_repo._checkout('new_branch', create=True) |
|
419 | 418 | |
|
420 | 419 | # This is required as one cannot do a --ff-only merge in an empty repo. |
|
421 | 420 | target_repo._local_pull(source_repo.path, 'new_branch') |
|
422 | 421 | |
|
423 | 422 | target_repo._local_fetch(source_repo.path, 'master') |
|
424 | 423 | merge_message = 'Merge message\n\nDescription:...' |
|
425 | 424 | user_name = 'Albert Einstein' |
|
426 | 425 | user_email = 'albert@einstein.com' |
|
427 | 426 | target_repo._local_merge(merge_message, user_name, user_email, |
|
428 | 427 | target_repo._last_fetch_heads()) |
|
429 | 428 | |
|
430 | 429 | target_repo = GitRepository(target_repo.path) |
|
431 | 430 | assert target_repo.commit_ids[-2] == master_commit |
|
432 | 431 | last_commit = target_repo.get_commit(target_repo.head) |
|
433 | 432 | assert last_commit.message.strip() == merge_message |
|
434 | 433 | assert last_commit.author == '%s <%s>' % (user_name, user_email) |
|
435 | 434 | |
|
436 | 435 | assert not os.path.exists( |
|
437 | 436 | os.path.join(target_repo.path, '.git', 'MERGE_HEAD')) |
|
438 | 437 | |
|
439 | 438 | def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git): |
|
440 | 439 | target_repo = vcsbackend_git.create_repo(number_of_commits=1) |
|
441 | 440 | vcsbackend_git.ensure_file('README', 'I will conflict with you!!!') |
|
442 | 441 | |
|
443 | 442 | target_repo._local_fetch(self.repo.path, 'master') |
|
444 | 443 | with pytest.raises(RepositoryError): |
|
445 | 444 | target_repo._local_merge( |
|
446 | 445 | 'merge_message', 'user name', 'user@name.com', |
|
447 | 446 | target_repo._last_fetch_heads()) |
|
448 | 447 | |
|
449 | 448 | # Check we are not left in an intermediate merge state |
|
450 | 449 | assert not os.path.exists( |
|
451 | 450 | os.path.join(target_repo.path, '.git', 'MERGE_HEAD')) |
|
452 | 451 | |
|
453 | 452 | def test_local_merge_into_empty_repo(self): |
|
454 | 453 | target_repo = self.get_empty_repo() |
|
455 | 454 | |
|
456 | 455 | # This is required as one cannot do a --ff-only merge in an empty repo. |
|
457 | 456 | target_repo._local_fetch(self.repo.path, 'master') |
|
458 | 457 | with pytest.raises(RepositoryError): |
|
459 | 458 | target_repo._local_merge( |
|
460 | 459 | 'merge_message', 'user name', 'user@name.com', |
|
461 | 460 | target_repo._last_fetch_heads()) |
|
462 | 461 | |
|
463 | 462 | def test_local_merge_in_bare_repo(self): |
|
464 | 463 | with pytest.raises(RepositoryError): |
|
465 | 464 | self.repo._local_merge( |
|
466 | 465 | 'merge_message', 'user name', 'user@name.com', None) |
|
467 | 466 | |
|
468 | 467 | def test_local_push_non_bare(self): |
|
469 | 468 | target_repo = self.get_empty_repo() |
|
470 | 469 | |
|
471 | 470 | pushed_branch = 'pushed_branch' |
|
472 | 471 | self.repo._local_push('master', target_repo.path, pushed_branch) |
|
473 | 472 | # Fix the HEAD of the target repo, otherwise GitRepository won't |
|
474 | 473 | # report any branches. |
|
475 | 474 | with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f: |
|
476 | 475 | f.write('ref: refs/heads/%s' % pushed_branch) |
|
477 | 476 | |
|
478 | 477 | target_repo = GitRepository(target_repo.path) |
|
479 | 478 | |
|
480 | 479 | assert (target_repo.branches[pushed_branch] == |
|
481 | 480 | self.repo.branches['master']) |
|
482 | 481 | |
|
483 | 482 | def test_local_push_bare(self): |
|
484 | 483 | target_repo = self.get_empty_repo(bare=True) |
|
485 | 484 | |
|
486 | 485 | pushed_branch = 'pushed_branch' |
|
487 | 486 | self.repo._local_push('master', target_repo.path, pushed_branch) |
|
488 | 487 | # Fix the HEAD of the target repo, otherwise GitRepository won't |
|
489 | 488 | # report any branches. |
|
490 | 489 | with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f: |
|
491 | 490 | f.write('ref: refs/heads/%s' % pushed_branch) |
|
492 | 491 | |
|
493 | 492 | target_repo = GitRepository(target_repo.path) |
|
494 | 493 | |
|
495 | 494 | assert (target_repo.branches[pushed_branch] == |
|
496 | 495 | self.repo.branches['master']) |
|
497 | 496 | |
|
498 | 497 | def test_local_push_non_bare_target_branch_is_checked_out(self): |
|
499 | 498 | target_repo = self.get_clone_repo() |
|
500 | 499 | |
|
501 | 500 | pushed_branch = 'pushed_branch' |
|
502 | 501 | # Create a new branch in source repo |
|
503 | 502 | new_branch_commit = target_repo.commit_ids[-3] |
|
504 | 503 | target_repo._checkout(new_branch_commit) |
|
505 | 504 | target_repo._checkout(pushed_branch, create=True) |
|
506 | 505 | |
|
507 | 506 | self.repo._local_push('master', target_repo.path, pushed_branch) |
|
508 | 507 | |
|
509 | 508 | target_repo = GitRepository(target_repo.path) |
|
510 | 509 | |
|
511 | 510 | assert (target_repo.branches[pushed_branch] == |
|
512 | 511 | self.repo.branches['master']) |
|
513 | 512 | |
|
514 | 513 | def test_local_push_raises_exception_on_conflict(self, vcsbackend_git): |
|
515 | 514 | target_repo = vcsbackend_git.create_repo(number_of_commits=1) |
|
516 | 515 | with pytest.raises(RepositoryError): |
|
517 | 516 | self.repo._local_push('master', target_repo.path, 'master') |
|
518 | 517 | |
|
519 | 518 | def test_hooks_can_be_enabled_via_env_variable_for_local_push(self): |
|
520 | 519 | target_repo = self.get_empty_repo(bare=True) |
|
521 | 520 | |
|
522 | 521 | with mock.patch.object(self.repo, 'run_git_command') as run_mock: |
|
523 | 522 | self.repo._local_push( |
|
524 | 523 | 'master', target_repo.path, 'master', enable_hooks=True) |
|
525 | 524 | env = run_mock.call_args[1]['extra_env'] |
|
526 | 525 | assert 'RC_SKIP_HOOKS' not in env |
|
527 | 526 | |
|
528 | 527 | def _add_failing_hook(self, repo_path, hook_name, bare=False): |
|
529 | 528 | path_components = ( |
|
530 | 529 | ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name]) |
|
531 | 530 | hook_path = os.path.join(repo_path, *path_components) |
|
532 | 531 | with open(hook_path, 'w') as f: |
|
533 | 532 | script_lines = [ |
|
534 | 533 | '#!%s' % sys.executable, |
|
535 | 534 | 'import os', |
|
536 | 535 | 'import sys', |
|
537 | 536 | 'if os.environ.get("RC_SKIP_HOOKS"):', |
|
538 | 537 | ' sys.exit(0)', |
|
539 | 538 | 'sys.exit(1)', |
|
540 | 539 | ] |
|
541 | 540 | f.write('\n'.join(script_lines)) |
|
542 | 541 | os.chmod(hook_path, 0755) |
|
543 | 542 | |
|
544 | 543 | def test_local_push_does_not_execute_hook(self): |
|
545 | 544 | target_repo = self.get_empty_repo() |
|
546 | 545 | |
|
547 | 546 | pushed_branch = 'pushed_branch' |
|
548 | 547 | self._add_failing_hook(target_repo.path, 'pre-receive') |
|
549 | 548 | self.repo._local_push('master', target_repo.path, pushed_branch) |
|
550 | 549 | # Fix the HEAD of the target repo, otherwise GitRepository won't |
|
551 | 550 | # report any branches. |
|
552 | 551 | with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f: |
|
553 | 552 | f.write('ref: refs/heads/%s' % pushed_branch) |
|
554 | 553 | |
|
555 | 554 | target_repo = GitRepository(target_repo.path) |
|
556 | 555 | |
|
557 | 556 | assert (target_repo.branches[pushed_branch] == |
|
558 | 557 | self.repo.branches['master']) |
|
559 | 558 | |
|
560 | 559 | def test_local_push_executes_hook(self): |
|
561 | 560 | target_repo = self.get_empty_repo(bare=True) |
|
562 | 561 | self._add_failing_hook(target_repo.path, 'pre-receive', bare=True) |
|
563 | 562 | with pytest.raises(RepositoryError): |
|
564 | 563 | self.repo._local_push( |
|
565 | 564 | 'master', target_repo.path, 'master', enable_hooks=True) |
|
566 | 565 | |
|
567 | 566 | def test_maybe_prepare_merge_workspace(self): |
|
568 | 567 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
569 | 568 | 2, 'pr2', Reference('branch', 'master', 'unused'), |
|
570 | 569 | Reference('branch', 'master', 'unused')) |
|
571 | 570 | |
|
572 | 571 | assert os.path.isdir(workspace) |
|
573 | 572 | workspace_repo = GitRepository(workspace) |
|
574 | 573 | assert workspace_repo.branches == self.repo.branches |
|
575 | 574 | |
|
576 | 575 | # Calling it a second time should also succeed |
|
577 | 576 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
578 | 577 | 2, 'pr2', Reference('branch', 'master', 'unused'), |
|
579 | 578 | Reference('branch', 'master', 'unused')) |
|
580 | 579 | assert os.path.isdir(workspace) |
|
581 | 580 | |
|
582 | 581 | def test_maybe_prepare_merge_workspace_different_refs(self): |
|
583 | 582 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
584 | 583 | 2, 'pr2', Reference('branch', 'master', 'unused'), |
|
585 | 584 | Reference('branch', 'develop', 'unused')) |
|
586 | 585 | |
|
587 | 586 | assert os.path.isdir(workspace) |
|
588 | 587 | workspace_repo = GitRepository(workspace) |
|
589 | 588 | assert workspace_repo.branches == self.repo.branches |
|
590 | 589 | |
|
591 | 590 | # Calling it a second time should also succeed |
|
592 | 591 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
593 | 592 | 2, 'pr2', Reference('branch', 'master', 'unused'), |
|
594 | 593 | Reference('branch', 'develop', 'unused')) |
|
595 | 594 | assert os.path.isdir(workspace) |
|
596 | 595 | |
|
597 | 596 | def test_cleanup_merge_workspace(self): |
|
598 | 597 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
599 | 598 | 2, 'pr3', Reference('branch', 'master', 'unused'), |
|
600 | 599 | Reference('branch', 'master', 'unused')) |
|
601 | 600 | self.repo.cleanup_merge_workspace(2, 'pr3') |
|
602 | 601 | |
|
603 | 602 | assert not os.path.exists(workspace) |
|
604 | 603 | |
|
605 | 604 | def test_cleanup_merge_workspace_invalid_workspace_id(self): |
|
606 | 605 | # No assert needed: in case of a nonexistent workspace this function |
|
607 | 606 | # should still succeed. |
|
608 | 607 | self.repo.cleanup_merge_workspace(1, 'pr4') |
|
609 | 608 | |
|
610 | 609 | def test_set_refs(self): |
|
611 | 610 | test_ref = 'refs/test-refs/abcde' |
|
612 | 611 | test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623' |
|
613 | 612 | |
|
614 | 613 | self.repo.set_refs(test_ref, test_commit_id) |
|
615 | 614 | stdout, _ = self.repo.run_git_command(['show-ref']) |
|
616 | 615 | assert test_ref in stdout |
|
617 | 616 | assert test_commit_id in stdout |
|
618 | 617 | |
|
619 | 618 | def test_remove_ref(self): |
|
620 | 619 | test_ref = 'refs/test-refs/abcde' |
|
621 | 620 | test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623' |
|
622 | 621 | self.repo.set_refs(test_ref, test_commit_id) |
|
623 | 622 | stdout, _ = self.repo.run_git_command(['show-ref']) |
|
624 | 623 | assert test_ref in stdout |
|
625 | 624 | assert test_commit_id in stdout |
|
626 | 625 | |
|
627 | 626 | self.repo.remove_ref(test_ref) |
|
628 | 627 | stdout, _ = self.repo.run_git_command(['show-ref']) |
|
629 | 628 | assert test_ref not in stdout |
|
630 | 629 | assert test_commit_id not in stdout |
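
In plain git plumbing, the `set_refs`/`remove_ref` operations exercised above correspond to `git update-ref`; a sketch using `subprocess` instead of the RhodeCode remote layer:

    import subprocess

    def set_ref(repo_path, ref, commit_id):
        subprocess.check_call(['git', 'update-ref', ref, commit_id], cwd=repo_path)

    def remove_ref(repo_path, ref):
        subprocess.check_call(['git', 'update-ref', '-d', ref], cwd=repo_path)

    def show_refs(repo_path):
        # Same output the assertions grep: one '<commit_id> <ref>' per line.
        return subprocess.check_output(['git', 'show-ref'], cwd=repo_path)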
|
631 | 630 | |
|
632 | 631 | |
|
633 | 632 | class TestGitCommit(object): |
|
634 | 633 | |
|
635 | 634 | @pytest.fixture(autouse=True) |
|
636 | 635 | def prepare(self): |
|
637 | 636 | self.repo = GitRepository(TEST_GIT_REPO) |
|
638 | 637 | |
|
639 | 638 | def test_default_commit(self): |
|
640 | 639 | tip = self.repo.get_commit() |
|
641 | 640 | assert tip == self.repo.get_commit(None) |
|
642 | 641 | assert tip == self.repo.get_commit('tip') |
|
643 | 642 | |
|
644 | 643 | def test_root_node(self): |
|
645 | 644 | tip = self.repo.get_commit() |
|
646 | 645 | assert tip.root is tip.get_node('') |
|
647 | 646 | |
|
648 | 647 | def test_lazy_fetch(self): |
|
649 | 648 | """ |
|
650 | 649 | Test that a commit's nodes expand and are cached as we walk through |

651 | 650 | the commit. This test is somewhat hard to write, as the order of |

652 | 651 | operations is key here. Written by running command after command in a shell. |
|
653 | 652 | """ |
|
654 | 653 | commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' |
|
655 | 654 | assert commit_id in self.repo.commit_ids |
|
656 | 655 | commit = self.repo.get_commit(commit_id) |
|
657 | 656 | assert len(commit.nodes) == 0 |
|
658 | 657 | root = commit.root |
|
659 | 658 | assert len(commit.nodes) == 1 |
|
660 | 659 | assert len(root.nodes) == 8 |
|
661 | 660 | # accessing root.nodes updates commit.nodes |
|
662 | 661 | assert len(commit.nodes) == 9 |
|
663 | 662 | |
|
664 | 663 | docs = root.get_node('docs') |
|
665 | 664 | # we haven't yet accessed anything new as docs dir was already cached |
|
666 | 665 | assert len(commit.nodes) == 9 |
|
667 | 666 | assert len(docs.nodes) == 8 |
|
668 | 667 | # accessing docs.nodes updates commit.nodes |
|
669 | 668 | assert len(commit.nodes) == 17 |
|
670 | 669 | |
|
671 | 670 | assert docs is commit.get_node('docs') |
|
672 | 671 | assert docs is root.nodes[0] |
|
673 | 672 | assert docs is root.dirs[0] |
|
674 | 673 | assert docs is commit.get_node('docs') |
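
The exact counts above (1 root node, then 9, then 17) fall out of one property: each visited path is materialised once into a per-commit cache, so repeated lookups return the identical object. A toy model of that caching:

    class LazyTree(object):
        def __init__(self, listing):
            self.listing = listing   # path -> list of child paths
            self.nodes = {}          # the cache the test measures via len()

        def get_node(self, path):
            if path not in self.nodes:   # built once, then always reused
                self.nodes[path] = dict(path=path,
                                        children=self.listing.get(path, []))
            return self.nodes[path]

    tree = LazyTree({'': ['docs'], 'docs': ['api']})
    root = tree.get_node('')
    assert tree.get_node('docs') is tree.get_node('docs')   # cached identity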
|
675 | 674 | |
|
676 | 675 | def test_nodes_with_commit(self): |
|
677 | 676 | commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' |
|
678 | 677 | commit = self.repo.get_commit(commit_id) |
|
679 | 678 | root = commit.root |
|
680 | 679 | docs = root.get_node('docs') |
|
681 | 680 | assert docs is commit.get_node('docs') |
|
682 | 681 | api = docs.get_node('api') |
|
683 | 682 | assert api is commit.get_node('docs/api') |
|
684 | 683 | index = api.get_node('index.rst') |
|
685 | 684 | assert index is commit.get_node('docs/api/index.rst') |
|
686 | 685 | assert index is commit.get_node('docs')\ |
|
687 | 686 | .get_node('api')\ |
|
688 | 687 | .get_node('index.rst') |
|
689 | 688 | |
|
690 | 689 | def test_branch_and_tags(self): |
|
691 | 690 | """ |
|
692 | 691 | rev0 = self.repo.commit_ids[0] |
|
693 | 692 | commit0 = self.repo.get_commit(rev0) |
|
694 | 693 | assert commit0.branch == 'master' |
|
695 | 694 | assert commit0.tags == [] |
|
696 | 695 | |
|
697 | 696 | rev10 = self.repo.commit_ids[10] |
|
698 | 697 | commit10 = self.repo.get_commit(rev10) |
|
699 | 698 | assert commit10.branch == 'master' |
|
700 | 699 | assert commit10.tags == [] |
|
701 | 700 | |
|
702 | 701 | rev44 = self.repo.commit_ids[44] |
|
703 | 702 | commit44 = self.repo.get_commit(rev44) |
|
704 | 703 | assert commit44.branch == 'web-branch' |
|
705 | 704 | |
|
706 | 705 | tip = self.repo.get_commit('tip') |
|
707 | 706 | assert 'tip' in tip.tags |
|
708 | 707 | """ |
|
709 | 708 | # These assertions would fail - branch handling is going |

710 | 709 | # to change in the main API in order to support the git backend |
|
711 | 710 | pass |
|
712 | 711 | |
|
713 | 712 | def test_file_size(self): |
|
714 | 713 | to_check = ( |
|
715 | 714 | ('c1214f7e79e02fc37156ff215cd71275450cffc3', |
|
716 | 715 | 'vcs/backends/BaseRepository.py', 502), |
|
717 | 716 | ('d7e0d30fbcae12c90680eb095a4f5f02505ce501', |
|
718 | 717 | 'vcs/backends/hg.py', 854), |
|
719 | 718 | ('6e125e7c890379446e98980d8ed60fba87d0f6d1', |
|
720 | 719 | 'setup.py', 1068), |
|
721 | 720 | |
|
722 | 721 | ('d955cd312c17b02143c04fa1099a352b04368118', |
|
723 | 722 | 'vcs/backends/base.py', 2921), |
|
724 | 723 | ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', |
|
725 | 724 | 'vcs/backends/base.py', 3936), |
|
726 | 725 | ('f50f42baeed5af6518ef4b0cb2f1423f3851a941', |
|
727 | 726 | 'vcs/backends/base.py', 6189), |
|
728 | 727 | ) |
|
729 | 728 | for commit_id, path, size in to_check: |
|
730 | 729 | node = self.repo.get_commit(commit_id).get_node(path) |
|
731 | 730 | assert node.is_file() |
|
732 | 731 | assert node.size == size |
|
733 | 732 | |
|
734 | 733 | def test_file_history_from_commits(self): |
|
735 | 734 | node = self.repo[10].get_node('setup.py') |
|
736 | 735 | commit_ids = [commit.raw_id for commit in node.history] |
|
737 | 736 | assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids |
|
738 | 737 | |
|
739 | 738 | node = self.repo[20].get_node('setup.py') |
|
740 | 739 | node_ids = [commit.raw_id for commit in node.history] |
|
741 | 740 | assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
742 | 741 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids |
|
743 | 742 | |
|
744 | 743 | # special case: we check history from a commit that has this particular |

745 | 744 | # file changed; this means we check whether it's included as well |
|
746 | 745 | node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \ |
|
747 | 746 | .get_node('setup.py') |
|
748 | 747 | node_ids = [commit.raw_id for commit in node.history] |
|
749 | 748 | assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
750 | 749 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids |
|
751 | 750 | |
|
752 | 751 | def test_file_history(self): |
|
753 | 752 | # we can only check that those commits are present in the history, |

754 | 753 | # as we cannot update this test every time the file is changed |
|
755 | 754 | files = { |
|
756 | 755 | 'setup.py': [ |
|
757 | 756 | '54386793436c938cff89326944d4c2702340037d', |
|
758 | 757 | '51d254f0ecf5df2ce50c0b115741f4cf13985dab', |
|
759 | 758 | '998ed409c795fec2012b1c0ca054d99888b22090', |
|
760 | 759 | '5e0eb4c47f56564395f76333f319d26c79e2fb09', |
|
761 | 760 | '0115510b70c7229dbc5dc49036b32e7d91d23acd', |
|
762 | 761 | '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e', |
|
763 | 762 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
764 | 763 | '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', |
|
765 | 764 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
766 | 765 | ], |
|
767 | 766 | 'vcs/nodes.py': [ |
|
768 | 767 | '33fa3223355104431402a888fa77a4e9956feb3e', |
|
769 | 768 | 'fa014c12c26d10ba682fadb78f2a11c24c8118e1', |
|
770 | 769 | 'e686b958768ee96af8029fe19c6050b1a8dd3b2b', |
|
771 | 770 | 'ab5721ca0a081f26bf43d9051e615af2cc99952f', |
|
772 | 771 | 'c877b68d18e792a66b7f4c529ea02c8f80801542', |
|
773 | 772 | '4313566d2e417cb382948f8d9d7c765330356054', |
|
774 | 773 | '6c2303a793671e807d1cfc70134c9ca0767d98c2', |
|
775 | 774 | '54386793436c938cff89326944d4c2702340037d', |
|
776 | 775 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
777 | 776 | '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b', |
|
778 | 777 | '2d03ca750a44440fb5ea8b751176d1f36f8e8f46', |
|
779 | 778 | '2a08b128c206db48c2f0b8f70df060e6db0ae4f8', |
|
780 | 779 | '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b', |
|
781 | 780 | 'ac71e9503c2ca95542839af0ce7b64011b72ea7c', |
|
782 | 781 | '12669288fd13adba2a9b7dd5b870cc23ffab92d2', |
|
783 | 782 | '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382', |
|
784 | 783 | '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5', |
|
785 | 784 | '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', |
|
786 | 785 | 'f50f42baeed5af6518ef4b0cb2f1423f3851a941', |
|
787 | 786 | 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25', |
|
788 | 787 | 'f15c21f97864b4f071cddfbf2750ec2e23859414', |
|
789 | 788 | 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade', |
|
790 | 789 | 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b', |
|
791 | 790 | '84dec09632a4458f79f50ddbbd155506c460b4f9', |
|
792 | 791 | '0115510b70c7229dbc5dc49036b32e7d91d23acd', |
|
793 | 792 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
794 | 793 | '3bf1c5868e570e39569d094f922d33ced2fa3b2b', |
|
795 | 794 | 'b8d04012574729d2c29886e53b1a43ef16dd00a1', |
|
796 | 795 | '6970b057cffe4aab0a792aa634c89f4bebf01441', |
|
797 | 796 | 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', |
|
798 | 797 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', |
|
799 | 798 | ], |
|
800 | 799 | 'vcs/backends/git.py': [ |
|
801 | 800 | '4cf116ad5a457530381135e2f4c453e68a1b0105', |
|
802 | 801 | '9a751d84d8e9408e736329767387f41b36935153', |
|
803 | 802 | 'cb681fb539c3faaedbcdf5ca71ca413425c18f01', |
|
804 | 803 | '428f81bb652bcba8d631bce926e8834ff49bdcc6', |
|
805 | 804 | '180ab15aebf26f98f714d8c68715e0f05fa6e1c7', |
|
806 | 805 | '2b8e07312a2e89e92b90426ab97f349f4bce2a3a', |
|
807 | 806 | '50e08c506174d8645a4bb517dd122ac946a0f3bf', |
|
808 | 807 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
809 | 808 | ], |
|
810 | 809 | } |
|
811 | 810 | for path, commit_ids in files.items(): |
|
812 | 811 | node = self.repo.get_commit(commit_ids[0]).get_node(path) |
|
813 | 812 | node_ids = [commit.raw_id for commit in node.history] |
|
814 | 813 | assert set(commit_ids).issubset(set(node_ids)), ( |
|
815 | 814 | "We assumed that %s is a subset of the commit_ids for which file %s " |

816 | 815 | "has been changed, but the history of that node returned: %s" |
|
817 | 816 | % (commit_ids, path, node_ids)) |
|
818 | 817 | |
|
819 | 818 | def test_file_annotate(self): |
|
820 | 819 | files = { |
|
821 | 820 | 'vcs/backends/__init__.py': { |
|
822 | 821 | 'c1214f7e79e02fc37156ff215cd71275450cffc3': { |
|
823 | 822 | 'lines_no': 1, |
|
824 | 823 | 'commits': [ |
|
825 | 824 | 'c1214f7e79e02fc37156ff215cd71275450cffc3', |
|
826 | 825 | ], |
|
827 | 826 | }, |
|
828 | 827 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': { |
|
829 | 828 | 'lines_no': 21, |
|
830 | 829 | 'commits': [ |
|
831 | 830 | '49d3fd156b6f7db46313fac355dca1a0b94a0017', |
|
832 | 831 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
833 | 832 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
834 | 833 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
835 | 834 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
836 | 835 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
837 | 836 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
838 | 837 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
839 | 838 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
840 | 839 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
841 | 840 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
842 | 841 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
843 | 842 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
844 | 843 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
845 | 844 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
846 | 845 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
847 | 846 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
848 | 847 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
849 | 848 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
850 | 849 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
851 | 850 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
852 | 851 | ], |
|
853 | 852 | }, |
|
854 | 853 | 'e29b67bd158580fc90fc5e9111240b90e6e86064': { |
|
855 | 854 | 'lines_no': 32, |
|
856 | 855 | 'commits': [ |
|
857 | 856 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
858 | 857 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
859 | 858 | '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', |
|
860 | 859 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
861 | 860 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
862 | 861 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
863 | 862 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
864 | 863 | '54000345d2e78b03a99d561399e8e548de3f3203', |
|
865 | 864 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
866 | 865 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
867 | 866 | '78c3f0c23b7ee935ec276acb8b8212444c33c396', |
|
868 | 867 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
869 | 868 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
870 | 869 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
871 | 870 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
872 | 871 | '2a13f185e4525f9d4b59882791a2d397b90d5ddc', |
|
873 | 872 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
874 | 873 | '78c3f0c23b7ee935ec276acb8b8212444c33c396', |
|
875 | 874 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
876 | 875 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
877 | 876 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
878 | 877 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
879 | 878 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
880 | 879 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
881 | 880 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
882 | 881 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
883 | 882 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
884 | 883 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
885 | 884 | '992f38217b979d0b0987d0bae3cc26dac85d9b19', |
|
886 | 885 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
887 | 886 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
888 | 887 | '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', |
|
889 | 888 | ], |
|
890 | 889 | }, |
|
891 | 890 | }, |
|
892 | 891 | } |
|
893 | 892 | |
|
894 | 893 | for fname, commit_dict in files.items(): |
|
895 | 894 | for commit_id, __ in commit_dict.items(): |
|
896 | 895 | commit = self.repo.get_commit(commit_id) |
|
897 | 896 | |
|
898 | 897 | l1_1 = [x[1] for x in commit.get_file_annotate(fname)] |
|
899 | 898 | l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)] |
|
900 | 899 | assert l1_1 == l1_2 |
|
901 | 900 | l1 = l1_1 |
|
902 | 901 | l2 = files[fname][commit_id]['commits'] |
|
903 | 902 | assert l1 == l2, ( |
|
904 | 903 | "The lists of commit_ids for %s@commit_id %s " |
|
905 | 904 | "from annotation list should match each other, " |
|
906 | 905 | "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2)) |
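
The indexing in the loop above implies the shape of the annotation rows: each one carries the line's commit id directly (`x[1]`) plus a zero-argument loader for the full commit object (`x[2]`). A sketch of consuming such a stream, assuming only that tuple layout:

    def blame_summary(commit, path):
        per_commit = {}
        for row in commit.get_file_annotate(path):
            commit_id = row[1]               # same field the test reads
            # row[2]() would lazily build the commit: row[2]().raw_id == row[1]
            per_commit[commit_id] = per_commit.get(commit_id, 0) + 1
        return per_commit                    # commit id -> lines it still owns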
|
907 | 906 | |
|
908 | 907 | def test_files_state(self): |
|
909 | 908 | """ |
|
910 | 909 | Tests state of FileNodes. |
|
911 | 910 | """ |
|
912 | 911 | node = self.repo\ |
|
913 | 912 | .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\ |
|
914 | 913 | .get_node('vcs/utils/diffs.py') |
|
915 | 914 | assert node.state == NodeState.ADDED |
|
916 | 915 | assert node.added |
|
917 | 916 | assert not node.changed |
|
918 | 917 | assert not node.not_changed |
|
919 | 918 | assert not node.removed |
|
920 | 919 | |
|
921 | 920 | node = self.repo\ |
|
922 | 921 | .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\ |
|
923 | 922 | .get_node('.hgignore') |
|
924 | 923 | assert node.state == NodeState.CHANGED |
|
925 | 924 | assert not node.added |
|
926 | 925 | assert node.changed |
|
927 | 926 | assert not node.not_changed |
|
928 | 927 | assert not node.removed |
|
929 | 928 | |
|
930 | 929 | node = self.repo\ |
|
931 | 930 | .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\ |
|
932 | 931 | .get_node('setup.py') |
|
933 | 932 | assert node.state == NodeState.NOT_CHANGED |
|
934 | 933 | assert not node.added |
|
935 | 934 | assert not node.changed |
|
936 | 935 | assert node.not_changed |
|
937 | 936 | assert not node.removed |
|
938 | 937 | |
|
939 | 938 | # If node has REMOVED state then trying to fetch it would raise |
|
940 | 939 | # CommitError exception |
|
941 | 940 | commit = self.repo.get_commit( |
|
942 | 941 | 'fa6600f6848800641328adbf7811fd2372c02ab2') |
|
943 | 942 | path = 'vcs/backends/BaseRepository.py' |
|
944 | 943 | with pytest.raises(NodeDoesNotExistError): |
|
945 | 944 | commit.get_node(path) |
|
946 | 945 | # but it would be one of ``removed`` (commit's attribute) |
|
947 | 946 | assert path in [rf.path for rf in commit.removed] |
|
948 | 947 | |
|
949 | 948 | commit = self.repo.get_commit( |
|
950 | 949 | '54386793436c938cff89326944d4c2702340037d') |
|
951 | 950 | changed = [ |
|
952 | 951 | 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py', |
|
953 | 952 | 'vcs/nodes.py'] |
|
954 | 953 | assert set(changed) == set([f.path for f in commit.changed]) |
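
Conceptually, the four states asserted above come from comparing a commit's file listing (and contents) against its parent; a sketch of that classification over plain dicts mapping path to content (names here are illustrative, not the backend API):

    def classify(parent_files, files):
        added = set(files) - set(parent_files)
        removed = set(parent_files) - set(files)
        common = set(files) & set(parent_files)
        changed = set(p for p in common if files[p] != parent_files[p])
        not_changed = common - changed
        return added, changed, not_changed, removed

    added, changed, not_changed, removed = classify(
        {'setup.py': 'v1'}, {'setup.py': 'v2', 'README': 'new'})
    assert added == set(['README']) and changed == set(['setup.py'])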
|
955 | 954 | |
|
956 | 955 | def test_unicode_branch_refs(self): |
|
957 | 956 | unicode_branches = { |
|
958 | 957 | 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b', |
|
959 | 958 | u'refs/heads/uniçâβe': 'ürl', |
|
960 | 959 | } |
|
961 | 960 | with mock.patch( |
|
962 | 961 | ("rhodecode.lib.vcs.backends.git.repository" |
|
963 | 962 | ".GitRepository._refs"), |
|
964 | 963 | unicode_branches): |
|
965 | 964 | branches = self.repo.branches |
|
966 | 965 | |
|
967 | 966 | assert 'unicode' in branches |
|
968 | 967 | assert u'uniçâβe' in branches |
|
969 | 968 | |
|
970 | 969 | def test_unicode_tag_refs(self): |
|
971 | 970 | unicode_tags = { |
|
972 | 971 | 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b', |
|
973 | 972 | u'refs/tags/uniçâβe': '6c0ce52b229aa978889e91b38777f800e85f330b', |
|
974 | 973 | } |
|
975 | 974 | with mock.patch( |
|
976 | 975 | ("rhodecode.lib.vcs.backends.git.repository" |
|
977 | 976 | ".GitRepository._refs"), |
|
978 | 977 | unicode_tags): |
|
979 | 978 | tags = self.repo.tags |
|
980 | 979 | |
|
981 | 980 | assert 'unicode' in tags |
|
982 | 981 | assert u'uniçâβe' in tags |
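
Both mocks above patch the raw `_refs` dict directly, which works because branch and tag names are derived from it by a simple prefix strip; a sketch:

    def split_refs(refs):
        branches, tags = {}, {}
        for ref, commit_id in refs.items():
            if ref.startswith('refs/heads/'):
                branches[ref[len('refs/heads/'):]] = commit_id
            elif ref.startswith('refs/tags/'):
                tags[ref[len('refs/tags/'):]] = commit_id
        return branches, tags

    branches, tags = split_refs(
        {'refs/heads/unicode': 'abc', 'refs/tags/v1': 'def'})
    assert 'unicode' in branches and 'v1' in tags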
|
983 | 982 | |
|
984 | 983 | def test_commit_message_is_unicode(self): |
|
985 | 984 | for commit in self.repo: |
|
986 | 985 | assert type(commit.message) == unicode |
|
987 | 986 | |
|
988 | 987 | def test_commit_author_is_unicode(self): |
|
989 | 988 | for commit in self.repo: |
|
990 | 989 | assert type(commit.author) == unicode |
|
991 | 990 | |
|
992 | 991 | def test_repo_files_content_is_unicode(self): |
|
993 | 992 | commit = self.repo.get_commit() |
|
994 | 993 | for node in commit.get_node('/'): |
|
995 | 994 | if node.is_file(): |
|
996 | 995 | assert type(node.content) == unicode |
|
997 | 996 | |
|
998 | 997 | def test_wrong_path(self): |
|
999 | 998 | # There is a 'setup.py' in the root dir, but not at this path: |
|
1000 | 999 | path = 'foo/bar/setup.py' |
|
1001 | 1000 | tip = self.repo.get_commit() |
|
1002 | 1001 | with pytest.raises(VCSError): |
|
1003 | 1002 | tip.get_node(path) |
|
1004 | 1003 | |
|
1005 | 1004 | @pytest.mark.parametrize("author_email, commit_id", [ |
|
1006 | 1005 | ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'), |
|
1007 | 1006 | ('lukasz.balcerzak@python-center.pl', |
|
1008 | 1007 | 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'), |
|
1009 | 1008 | ('none@none', '8430a588b43b5d6da365400117c89400326e7992'), |
|
1010 | 1009 | ]) |
|
1011 | 1010 | def test_author_email(self, author_email, commit_id): |
|
1012 | 1011 | commit = self.repo.get_commit(commit_id) |
|
1013 | 1012 | assert author_email == commit.author_email |
|
1014 | 1013 | |
|
1015 | 1014 | @pytest.mark.parametrize("author, commit_id", [ |
|
1016 | 1015 | ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'), |
|
1017 | 1016 | ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'), |
|
1018 | 1017 | ('marcink', '8430a588b43b5d6da365400117c89400326e7992'), |
|
1019 | 1018 | ]) |
|
1020 | 1019 | def test_author_username(self, author, commit_id): |
|
1021 | 1020 | commit = self.repo.get_commit(commit_id) |
|
1022 | 1021 | assert author == commit.author_name |
|
1023 | 1022 | |
|
1024 | 1023 | |
|
1025 | 1024 | class TestLargeFileRepo(object): |
|
1026 | 1025 | |
|
1027 | 1026 | def test_large_file(self, backend_git): |
|
1028 | 1027 | conf = make_db_config() |
|
1029 | 1028 | repo = backend_git.create_test_repo('largefiles', conf) |
|
1030 | 1029 | |
|
1031 | 1030 | tip = repo.scm_instance().get_commit() |
|
1032 | 1031 | |
|
1033 | 1032 | # extract the stored LF node into the origin cache |
|
1034 | 1033 | lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store') |
|
1035 | 1034 | |
|
1036 | 1035 | oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf' |
|
1037 | 1036 | oid_path = os.path.join(lfs_store, oid) |
|
1038 | 1037 | oid_destination = os.path.join( |
|
1039 | 1038 | conf.get('vcs_git_lfs', 'store_location'), oid) |
|
1040 | 1039 | shutil.copy(oid_path, oid_destination) |
|
1041 | 1040 | |
|
1042 | 1041 | node = tip.get_node('1MB.zip') |
|
1043 | 1042 | |
|
1044 | 1043 | lf_node = node.get_largefile_node() |
|
1045 | 1044 | |
|
1046 | 1045 | assert lf_node.is_largefile() is True |
|
1047 | 1046 | assert lf_node.size == 1024000 |
|
1048 | 1047 | assert lf_node.name == '1MB.zip' |
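
Background for the oid shuffling in the fixture above: a git-lfs tracked file is committed as a small pointer whose sha256 oid keys the real bytes in a separate store. A sketch of parsing such a pointer (format per the git-lfs v1 spec):

    def parse_lfs_pointer(content):
        # Pointer files look like:
        #   version https://git-lfs.github.com/spec/v1
        #   oid sha256:7b331c02e313...
        #   size 1024000
        fields = dict(line.split(' ', 1) for line in content.splitlines() if line)
        oid = fields['oid'].split(':', 1)[1]
        return oid, int(fields['size'])

    oid, size = parse_lfs_pointer(
        'version https://git-lfs.github.com/spec/v1\n'
        'oid sha256:7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf\n'
        'size 1024000\n')
    assert size == 1024000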
|
1049 | 1048 | |
|
1050 | 1049 | |
|
1051 | 1050 | @pytest.mark.usefixtures("vcs_repository_support") |
|
1052 | 1051 | class TestGitSpecificWithRepo(BackendTestMixin): |
|
1053 | 1052 | |
|
1054 | 1053 | @classmethod |
|
1055 | 1054 | def _get_commits(cls): |
|
1056 | 1055 | return [ |
|
1057 | 1056 | { |
|
1058 | 1057 | 'message': 'Initial', |
|
1059 | 1058 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1060 | 1059 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
1061 | 1060 | 'added': [ |
|
1062 | 1061 | FileNode('foobar/static/js/admin/base.js', content='base'), |
|
1063 | 1062 | FileNode( |
|
1064 | 1063 | 'foobar/static/admin', content='admin', |
|
1065 | 1064 | mode=0120000), # this is a link |
|
1066 | 1065 | FileNode('foo', content='foo'), |
|
1067 | 1066 | ], |
|
1068 | 1067 | }, |
|
1069 | 1068 | { |
|
1070 | 1069 | 'message': 'Second', |
|
1071 | 1070 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1072 | 1071 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
1073 | 1072 | 'added': [ |
|
1074 | 1073 | FileNode('foo2', content='foo2'), |
|
1075 | 1074 | ], |
|
1076 | 1075 | }, |
|
1077 | 1076 | ] |
|
1078 | 1077 | |
|
1079 | 1078 | def test_paths_slow_traversing(self): |
|
1080 | 1079 | commit = self.repo.get_commit() |
|
1081 | 1080 | assert commit.get_node('foobar').get_node('static').get_node('js')\ |
|
1082 | 1081 | .get_node('admin').get_node('base.js').content == 'base' |
|
1083 | 1082 | |
|
1084 | 1083 | def test_paths_fast_traversing(self): |
|
1085 | 1084 | commit = self.repo.get_commit() |
|
1086 | 1085 | assert ( |
|
1087 | 1086 | commit.get_node('foobar/static/js/admin/base.js').content == |
|
1088 | 1087 | 'base') |
|
1089 | 1088 | |
|
1090 | 1089 | def test_get_diff_runs_git_command_with_hashes(self): |
|
1091 | 1090 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
1092 | 1091 | self.repo.get_diff(self.repo[0], self.repo[1]) |
|
1093 | 1092 | self.repo.run_git_command.assert_called_once_with( |
|
1094 | 1093 | ['diff', '-U3', '--full-index', '--binary', '-p', '-M', |
|
1095 | 1094 | '--abbrev=40', self.repo._get_commit_id(0), |
|
1096 | 1095 | self.repo._get_commit_id(1)]) |
|
1097 | 1096 | |
|
1098 | 1097 | def test_get_diff_runs_git_command_with_str_hashes(self): |
|
1099 | 1098 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
1100 | 1099 | self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1]) |
|
1101 | 1100 | self.repo.run_git_command.assert_called_once_with( |
|
1102 | 1101 | ['show', '-U3', '--full-index', '--binary', '-p', '-M', |
|
1103 | 1102 | '--abbrev=40', self.repo._get_commit_id(1)]) |
|
1104 | 1103 | |
|
1105 | 1104 | def test_get_diff_runs_git_command_with_path_if_its_given(self): |
|
1106 | 1105 | self.repo.run_git_command = mock.Mock(return_value=['', '']) |
|
1107 | 1106 | self.repo.get_diff(self.repo[0], self.repo[1], 'foo') |
|
1108 | 1107 | self.repo.run_git_command.assert_called_once_with( |
|
1109 | 1108 | ['diff', '-U3', '--full-index', '--binary', '-p', '-M', |
|
1110 | 1109 | '--abbrev=40', self.repo._get_commit_id(0), |
|
1111 | 1110 | self.repo._get_commit_id(1), '--', 'foo']) |
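
The three tests above pin the exact git invocations (the empty-commit case swaps `diff` for `show` against a single commit). Reproducing the two-commit case with plain `subprocess`, as a sketch where `repo_path` is an assumption:

    import subprocess

    def raw_diff(repo_path, commit_id1, commit_id2, path=None):
        cmd = ['git', 'diff', '-U3', '--full-index', '--binary', '-p', '-M',
               '--abbrev=40', commit_id1, commit_id2]
        if path:
            cmd += ['--', path]   # limit the diff to one path, as the test does
        return subprocess.check_output(cmd, cwd=repo_path)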
|
1112 | 1111 | |
|
1113 | 1112 | |
|
1114 | 1113 | @pytest.mark.usefixtures("vcs_repository_support") |
|
1115 | 1114 | class TestGitRegression(BackendTestMixin): |
|
1116 | 1115 | |
|
1117 | 1116 | @classmethod |
|
1118 | 1117 | def _get_commits(cls): |
|
1119 | 1118 | return [ |
|
1120 | 1119 | { |
|
1121 | 1120 | 'message': 'Initial', |
|
1122 | 1121 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1123 | 1122 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
1124 | 1123 | 'added': [ |
|
1125 | 1124 | FileNode('bot/__init__.py', content='base'), |
|
1126 | 1125 | FileNode('bot/templates/404.html', content='base'), |
|
1127 | 1126 | FileNode('bot/templates/500.html', content='base'), |
|
1128 | 1127 | ], |
|
1129 | 1128 | }, |
|
1130 | 1129 | { |
|
1131 | 1130 | 'message': 'Second', |
|
1132 | 1131 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1133 | 1132 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
1134 | 1133 | 'added': [ |
|
1135 | 1134 | FileNode('bot/build/migrations/1.py', content='foo2'), |
|
1136 | 1135 | FileNode('bot/build/migrations/2.py', content='foo2'), |
|
1137 | 1136 | FileNode( |
|
1138 | 1137 | 'bot/build/static/templates/f.html', content='foo2'), |
|
1139 | 1138 | FileNode( |
|
1140 | 1139 | 'bot/build/static/templates/f1.html', content='foo2'), |
|
1141 | 1140 | FileNode('bot/build/templates/err.html', content='foo2'), |
|
1142 | 1141 | FileNode('bot/build/templates/err2.html', content='foo2'), |
|
1143 | 1142 | ], |
|
1144 | 1143 | }, |
|
1145 | 1144 | ] |
|
1146 | 1145 | |
|
1147 | 1146 | @pytest.mark.parametrize("path, expected_paths", [ |
|
1148 | 1147 | ('bot', [ |
|
1149 | 1148 | 'bot/build', |
|
1150 | 1149 | 'bot/templates', |
|
1151 | 1150 | 'bot/__init__.py']), |
|
1152 | 1151 | ('bot/build', [ |
|
1153 | 1152 | 'bot/build/migrations', |
|
1154 | 1153 | 'bot/build/static', |
|
1155 | 1154 | 'bot/build/templates']), |
|
1156 | 1155 | ('bot/build/static', [ |
|
1157 | 1156 | 'bot/build/static/templates']), |
|
1158 | 1157 | ('bot/build/static/templates', [ |
|
1159 | 1158 | 'bot/build/static/templates/f.html', |
|
1160 | 1159 | 'bot/build/static/templates/f1.html']), |
|
1161 | 1160 | ('bot/build/templates', [ |
|
1162 | 1161 | 'bot/build/templates/err.html', |
|
1163 | 1162 | 'bot/build/templates/err2.html']), |
|
1164 | 1163 | ('bot/templates/', [ |
|
1165 | 1164 | 'bot/templates/404.html', |
|
1166 | 1165 | 'bot/templates/500.html']), |
|
1167 | 1166 | ]) |
|
1168 | 1167 | def test_similar_paths(self, path, expected_paths): |
|
1169 | 1168 | commit = self.repo.get_commit() |
|
1170 | 1169 | paths = [n.path for n in commit.get_nodes(path)] |
|
1171 | 1170 | assert paths == expected_paths |
|
1172 | 1171 | |
|
1173 | 1172 | |
|
1174 | class TestDiscoverGitVersion: | |
|
1173 | class TestDiscoverGitVersion(object): | |
|
1175 | 1174 | |
|
1176 | 1175 | def test_returns_git_version(self, baseapp): |
|
1177 | 1176 | version = discover_git_version() |
|
1178 | 1177 | assert version |
|
1179 | 1178 | |
|
1180 | 1179 | def test_returns_empty_string_without_vcsserver(self): |
|
1181 | 1180 | mock_connection = mock.Mock() |
|
1182 | 1181 | mock_connection.discover_git_version = mock.Mock( |
|
1183 | 1182 | side_effect=Exception) |
|
1184 | 1183 | with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection): |
|
1185 | 1184 | version = discover_git_version() |
|
1186 | 1185 | assert version == '' |
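
The fallback behaviour under test, sketched without the vcsserver connection layer: any failure while probing degrades to an empty string rather than propagating:

    def discover_version(probe):
        try:
            return probe()
        except Exception:
            return ''

    def broken_probe():
        raise Exception('no vcsserver')

    assert discover_version(lambda: '2.16.4') == '2.16.4'
    assert discover_version(broken_probe) == ''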
|
1187 | 1186 | |
|
1188 | 1187 | |
|
1189 | 1188 | class TestGetSubmoduleUrl(object): |
|
1190 | 1189 | def test_submodules_file_found(self): |
|
1191 | 1190 | commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
1192 | 1191 | node = mock.Mock() |
|
1193 | 1192 | with mock.patch.object( |
|
1194 | 1193 | commit, 'get_node', return_value=node) as get_node_mock: |
|
1195 | 1194 | node.content = ( |
|
1196 | 1195 | '[submodule "subrepo1"]\n' |
|
1197 | 1196 | '\tpath = subrepo1\n' |
|
1198 | 1197 | '\turl = https://code.rhodecode.com/dulwich\n' |
|
1199 | 1198 | ) |
|
1200 | 1199 | result = commit._get_submodule_url('subrepo1') |
|
1201 | 1200 | get_node_mock.assert_called_once_with('.gitmodules') |
|
1202 | 1201 | assert result == 'https://code.rhodecode.com/dulwich' |
|
1203 | 1202 | |
|
1204 | 1203 | def test_complex_submodule_path(self): |
|
1205 | 1204 | commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
1206 | 1205 | node = mock.Mock() |
|
1207 | 1206 | with mock.patch.object( |
|
1208 | 1207 | commit, 'get_node', return_value=node) as get_node_mock: |
|
1209 | 1208 | node.content = ( |
|
1210 | 1209 | '[submodule "complex/subrepo/path"]\n' |
|
1211 | 1210 | '\tpath = complex/subrepo/path\n' |
|
1212 | 1211 | '\turl = https://code.rhodecode.com/dulwich\n' |
|
1213 | 1212 | ) |
|
1214 | 1213 | result = commit._get_submodule_url('complex/subrepo/path') |
|
1215 | 1214 | get_node_mock.assert_called_once_with('.gitmodules') |
|
1216 | 1215 | assert result == 'https://code.rhodecode.com/dulwich' |
|
1217 | 1216 | |
|
1218 | 1217 | def test_submodules_file_not_found(self): |
|
1219 | 1218 | commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
1220 | 1219 | with mock.patch.object( |
|
1221 | 1220 | commit, 'get_node', side_effect=NodeDoesNotExistError): |
|
1222 | 1221 | result = commit._get_submodule_url('complex/subrepo/path') |
|
1223 | 1222 | assert result is None |
|
1224 | 1223 | |
|
1225 | 1224 | def test_path_not_found(self): |
|
1226 | 1225 | commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
1227 | 1226 | node = mock.Mock() |
|
1228 | 1227 | with mock.patch.object( |
|
1229 | 1228 | commit, 'get_node', return_value=node) as get_node_mock: |
|
1230 | 1229 | node.content = ( |
|
1231 | 1230 | '[submodule "subrepo1"]\n' |
|
1232 | 1231 | '\tpath = subrepo1\n' |
|
1233 | 1232 | '\turl = https://code.rhodecode.com/dulwich\n' |
|
1234 | 1233 | ) |
|
1235 | 1234 | result = commit._get_submodule_url('subrepo2') |
|
1236 | 1235 | get_node_mock.assert_called_once_with('.gitmodules') |
|
1237 | 1236 | assert result is None |
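
The lookups tested above amount to scanning `.gitmodules` for the section whose `path` matches and returning its `url`, with `None` for a missing file or path. A hand-rolled sketch of that scan (git's own config syntax is richer; this only covers the shape used in these tests):

    def submodule_url(gitmodules_content, wanted_path):
        url, path = None, None
        for line in gitmodules_content.splitlines():
            line = line.strip()
            if line.startswith('['):          # a new [submodule "..."] section
                url, path = None, None
            elif line.startswith('path ='):
                path = line.split('=', 1)[1].strip()
            elif line.startswith('url ='):
                url = line.split('=', 1)[1].strip()
            if path == wanted_path and url:
                return url
        return None

    content = ('[submodule "subrepo1"]\n'
               '\tpath = subrepo1\n'
               '\turl = https://code.rhodecode.com/dulwich\n')
    assert submodule_url(content, 'subrepo1') == 'https://code.rhodecode.com/dulwich'
    assert submodule_url(content, 'subrepo2') is None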
|
1238 | 1237 | |
|
1239 | 1238 | def test_returns_cached_values(self): |
|
1240 | 1239 | commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
1241 | 1240 | node = mock.Mock() |
|
1242 | 1241 | with mock.patch.object( |
|
1243 | 1242 | commit, 'get_node', return_value=node) as get_node_mock: |
|
1244 | 1243 | node.content = ( |
|
1245 | 1244 | '[submodule "subrepo1"]\n' |
|
1246 | 1245 | '\tpath = subrepo1\n' |
|
1247 | 1246 | '\turl = https://code.rhodecode.com/dulwich\n' |
|
1248 | 1247 | ) |
|
1249 | 1248 | for _ in range(3): |
|
1250 | 1249 | commit._get_submodule_url('subrepo1') |
|
1251 | 1250 | get_node_mock.assert_called_once_with('.gitmodules') |
|
1252 | 1251 | |
|
1253 | 1252 | def test_get_node_returns_a_link(self): |
|
1254 | 1253 | repository = mock.Mock() |
|
1255 | 1254 | repository.alias = 'git' |
|
1256 | 1255 | commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1) |
|
1257 | 1256 | submodule_url = 'https://code.rhodecode.com/dulwich' |
|
1258 | 1257 | get_id_patch = mock.patch.object( |
|
1259 | 1258 | commit, '_get_id_for_path', return_value=(1, 'link')) |
|
1260 | 1259 | get_submodule_patch = mock.patch.object( |
|
1261 | 1260 | commit, '_get_submodule_url', return_value=submodule_url) |
|
1262 | 1261 | |
|
1263 | 1262 | with get_id_patch, get_submodule_patch as submodule_mock: |
|
1264 | 1263 | node = commit.get_node('/abcde') |
|
1265 | 1264 | |
|
1266 | 1265 | submodule_mock.assert_called_once_with('/abcde') |
|
1267 | 1266 | assert type(node) == SubModuleNode |
|
1268 | 1267 | assert node.url == submodule_url |
|
1269 | 1268 | |
|
1270 | 1269 | def test_get_nodes_returns_links(self): |
|
1271 | 1270 | repository = mock.MagicMock() |
|
1272 | 1271 | repository.alias = 'git' |
|
1273 | 1272 | repository._remote.tree_items.return_value = [ |
|
1274 | 1273 | ('subrepo', 'stat', 1, 'link') |
|
1275 | 1274 | ] |
|
1276 | 1275 | commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1) |
|
1277 | 1276 | submodule_url = 'https://code.rhodecode.com/dulwich' |
|
1278 | 1277 | get_id_patch = mock.patch.object( |
|
1279 | 1278 | commit, '_get_id_for_path', return_value=(1, 'tree')) |
|
1280 | 1279 | get_submodule_patch = mock.patch.object( |
|
1281 | 1280 | commit, '_get_submodule_url', return_value=submodule_url) |
|
1282 | 1281 | |
|
1283 | 1282 | with get_id_patch, get_submodule_patch as submodule_mock: |
|
1284 | 1283 | nodes = commit.get_nodes('/abcde') |
|
1285 | 1284 | |
|
1286 | 1285 | submodule_mock.assert_called_once_with('/abcde/subrepo') |
|
1287 | 1286 | assert len(nodes) == 1 |
|
1288 | 1287 | assert type(nodes[0]) == SubModuleNode |
|
1289 | 1288 | assert nodes[0].url == submodule_url |
@@ -1,1186 +1,1186 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | 26 | from rhodecode.lib.utils import make_db_config |
|
27 | 27 | from rhodecode.lib.vcs import backends |
|
28 | 28 | from rhodecode.lib.vcs.backends.base import ( |
|
29 | 29 | Reference, MergeResponse, MergeFailureReason) |
|
30 | 30 | from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit |
|
31 | 31 | from rhodecode.lib.vcs.exceptions import ( |
|
32 | 32 | RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError) |
|
33 | 33 | from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState |
|
34 | 34 | from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | pytestmark = pytest.mark.backends("hg") |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def repo_path_generator(): |
|
41 | 41 | """ |
|
42 | 42 | Yield a different path to be used for cloning repos. |
|
43 | 43 | """ |
|
44 | 44 | i = 0 |
|
45 | 45 | while True: |
|
46 | 46 | i += 1 |
|
47 | 47 | yield '%s-%d' % (TEST_HG_REPO_CLONE, i) |
|
48 | 48 | |
|
49 | 49 | REPO_PATH_GENERATOR = repo_path_generator() |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | @pytest.fixture(scope='class', autouse=True) |
|
53 | 53 | def repo(request, baseapp): |
|
54 | 54 | repo = MercurialRepository(TEST_HG_REPO) |
|
55 | 55 | if request.cls: |
|
56 | 56 | request.cls.repo = repo |
|
57 | 57 | return repo |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | class TestMercurialRepository: |
|
61 | 61 | |
|
62 | 62 | # pylint: disable=protected-access |
|
63 | 63 | |
|
64 | 64 | def get_clone_repo(self): |
|
65 | 65 | """ |
|
66 | 66 | Return a clone of the base repo. |
|
67 | 67 | """ |
|
68 | 68 | clone_path = next(REPO_PATH_GENERATOR) |
|
69 | 69 | repo_clone = MercurialRepository( |
|
70 | 70 | clone_path, create=True, src_url=self.repo.path) |
|
71 | 71 | |
|
72 | 72 | return repo_clone |
|
73 | 73 | |
|
74 | 74 | def get_empty_repo(self): |
|
75 | 75 | """ |
|
76 | 76 | Return an empty repo. |
|
77 | 77 | """ |
|
78 | 78 | return MercurialRepository(next(REPO_PATH_GENERATOR), create=True) |
|
79 | 79 | |
|
80 | 80 | def test_wrong_repo_path(self): |
|
81 | 81 | wrong_repo_path = '/tmp/errorrepo_hg' |
|
82 | 82 | with pytest.raises(RepositoryError): |
|
83 | 83 | MercurialRepository(wrong_repo_path) |
|
84 | 84 | |
|
85 | 85 | def test_unicode_path_repo(self): |
|
86 | 86 | with pytest.raises(VCSError): |
|
87 | 87 | MercurialRepository(u'iShouldFail') |
|
88 | 88 | |
|
89 | 89 | def test_unicode_commit_id(self): |
|
90 | 90 | with pytest.raises(CommitDoesNotExistError): |
|
91 | 91 | self.repo.get_commit(u'unicode-commit-id') |
|
92 | 92 | with pytest.raises(CommitDoesNotExistError): |
|
93 | 93 | self.repo.get_commit(u'unícøde-spéçial-chÀråcter-commit-id') |
|
94 | 94 | |
|
95 | 95 | def test_unicode_bookmark(self): |
|
96 | 96 | self.repo.bookmark(u'unicode-bookmark') |
|
97 | 97 | self.repo.bookmark(u'unícøde-spéçial-chÀråcter-bookmark') |
|
98 | 98 | |
|
99 | 99 | def test_unicode_branch(self): |
|
100 | 100 | with pytest.raises(KeyError): |
|
101 | 101 | self.repo.branches[u'unicode-branch'] |
|
102 | 102 | with pytest.raises(KeyError): |
|
103 | 103 | self.repo.branches[u'unícøde-spéçial-chÀråcter-branch'] |
|
104 | 104 | |
|
105 | 105 | def test_repo_clone(self): |
|
106 | 106 | if os.path.exists(TEST_HG_REPO_CLONE): |
|
107 | 107 | self.fail( |
|
108 | 108 | 'Cannot test mercurial clone repo as location %s already ' |
|
109 | 109 | 'exists. You should manually remove it first.' |
|
110 | 110 | % TEST_HG_REPO_CLONE) |
|
111 | 111 | |
|
112 | 112 | repo = MercurialRepository(TEST_HG_REPO) |
|
113 | 113 | repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, |
|
114 | 114 | src_url=TEST_HG_REPO) |
|
115 | 115 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
116 | 116 | # Checking hashes of commits should be enough |
|
117 | 117 | for commit in repo.get_commits(): |
|
118 | 118 | raw_id = commit.raw_id |
|
119 | 119 | assert raw_id == repo_clone.get_commit(raw_id).raw_id |
|
120 | 120 | |
|
121 | 121 | def test_repo_clone_with_update(self): |
|
122 | 122 | repo = MercurialRepository(TEST_HG_REPO) |
|
123 | 123 | repo_clone = MercurialRepository( |
|
124 | 124 | TEST_HG_REPO_CLONE + '_w_update', |
|
125 | src_url=TEST_HG_REPO, |

125 | src_url=TEST_HG_REPO, do_workspace_checkout=True) |
|
126 | 126 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
127 | 127 | |
|
128 | 128 | # check if current workdir was updated |
|
129 | 129 | assert os.path.isfile( |
|
130 | 130 | os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in')) |
|
131 | 131 | |
|
132 | 132 | def test_repo_clone_without_update(self): |
|
133 | 133 | repo = MercurialRepository(TEST_HG_REPO) |
|
134 | 134 | repo_clone = MercurialRepository( |
|
135 | 135 | TEST_HG_REPO_CLONE + '_wo_update', |
|
136 | src_url=TEST_HG_REPO, |

136 | src_url=TEST_HG_REPO, do_workspace_checkout=False) |
|
137 | 137 | assert len(repo.commit_ids) == len(repo_clone.commit_ids) |
|
138 | 138 | assert not os.path.isfile( |
|
139 | 139 | os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in')) |
|
140 | 140 | |
|
141 | 141 | def test_commit_ids(self): |
|
142 | 142 | # there are 21 commits at bitbucket now |
|
143 | 143 | # so we can assume they would be available from now on |
|
144 | 144 | subset = set([ |
|
145 | 145 | 'b986218ba1c9b0d6a259fac9b050b1724ed8e545', |
|
146 | 146 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', |
|
147 | 147 | '6cba7170863a2411822803fa77a0a264f1310b35', |
|
148 | 148 | '56349e29c2af3ac913b28bde9a2c6154436e615b', |
|
149 | 149 | '2dda4e345facb0ccff1a191052dd1606dba6781d', |
|
150 | 150 | '6fff84722075f1607a30f436523403845f84cd9e', |
|
151 | 151 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', |
|
152 | 152 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', |
|
153 | 153 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', |
|
154 | 154 | 'be90031137367893f1c406e0a8683010fd115b79', |
|
155 | 155 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', |
|
156 | 156 | '84478366594b424af694a6c784cb991a16b87c21', |
|
157 | 157 | '17f8e105dddb9f339600389c6dc7175d395a535c', |
|
158 | 158 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', |
|
159 | 159 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', |
|
160 | 160 | '786facd2c61deb9cf91e9534735124fb8fc11842', |
|
161 | 161 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', |
|
162 | 162 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', |
|
163 | 163 | 'eada5a770da98ab0dd7325e29d00e0714f228d09' |
|
164 | 164 | ]) |
|
165 | 165 | assert subset.issubset(set(self.repo.commit_ids)) |
|
166 | 166 | |
|
167 | 167 | # check if we have the proper order of commits |
|
168 | 168 | org = [ |
|
169 | 169 | 'b986218ba1c9b0d6a259fac9b050b1724ed8e545', |
|
170 | 170 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', |
|
171 | 171 | '6cba7170863a2411822803fa77a0a264f1310b35', |
|
172 | 172 | '56349e29c2af3ac913b28bde9a2c6154436e615b', |
|
173 | 173 | '2dda4e345facb0ccff1a191052dd1606dba6781d', |
|
174 | 174 | '6fff84722075f1607a30f436523403845f84cd9e', |
|
175 | 175 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', |
|
176 | 176 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', |
|
177 | 177 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', |
|
178 | 178 | 'be90031137367893f1c406e0a8683010fd115b79', |
|
179 | 179 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', |
|
180 | 180 | '84478366594b424af694a6c784cb991a16b87c21', |
|
181 | 181 | '17f8e105dddb9f339600389c6dc7175d395a535c', |
|
182 | 182 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', |
|
183 | 183 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', |
|
184 | 184 | '786facd2c61deb9cf91e9534735124fb8fc11842', |
|
185 | 185 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', |
|
186 | 186 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', |
|
187 | 187 | 'eada5a770da98ab0dd7325e29d00e0714f228d09', |
|
188 | 188 | '2c1885c735575ca478bf9e17b0029dca68824458', |
|
189 | 189 | 'd9bcd465040bf869799b09ad732c04e0eea99fe9', |
|
190 | 190 | '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7', |
|
191 | 191 | '4fb8326d78e5120da2c7468dcf7098997be385da', |
|
192 | 192 | '62b4a097164940bd66030c4db51687f3ec035eed', |
|
193 | 193 | '536c1a19428381cfea92ac44985304f6a8049569', |
|
194 | 194 | '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4', |
|
195 | 195 | '9bb326a04ae5d98d437dece54be04f830cf1edd9', |
|
196 | 196 | 'f8940bcb890a98c4702319fbe36db75ea309b475', |
|
197 | 197 | 'ff5ab059786ebc7411e559a2cc309dfae3625a3b', |
|
198 | 198 | '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08', |
|
199 | 199 | 'ee87846a61c12153b51543bf860e1026c6d3dcba', |
|
200 | 200 | ] |
|
201 | 201 | assert org == self.repo.commit_ids[:31] |
|
202 | 202 | |
|
203 | 203 | def test_iter_slice(self): |
|
204 | 204 | sliced = list(self.repo[:10]) |
|
205 | 205 | itered = list(self.repo)[:10] |
|
206 | 206 | assert sliced == itered |
|
207 | 207 | |
|
208 | 208 | def test_slicing(self): |
|
209 | 209 | # 4 1 5 10 95 |
|
210 | 210 | for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), |
|
211 | 211 | (10, 20, 10), (5, 100, 95)]: |
|
212 | 212 | indexes = list(self.repo[sfrom:sto]) |
|
213 | 213 | assert len(indexes) == size |
|
214 | 214 | assert indexes[0] == self.repo.get_commit(commit_idx=sfrom) |
|
215 | 215 | assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1) |
|
216 | 216 | |
|
217 | 217 | def test_branches(self): |
|
218 | 218 | # TODO: Need more tests here |
|
219 | 219 | |
|
220 | 220 | # active branches |
|
221 | 221 | assert 'default' in self.repo.branches |
|
222 | 222 | assert 'stable' in self.repo.branches |
|
223 | 223 | |
|
224 | 224 | # closed |
|
225 | 225 | assert 'git' in self.repo._get_branches(closed=True) |
|
226 | 226 | assert 'web' in self.repo._get_branches(closed=True) |
|
227 | 227 | |
|
228 | 228 | for name, id in self.repo.branches.items(): |
|
229 | 229 | assert isinstance(self.repo.get_commit(id), MercurialCommit) |
|
230 | 230 | |
|
231 | 231 | def test_tip_in_tags(self): |
|
232 | 232 | # tip is always a tag |
|
233 | 233 | assert 'tip' in self.repo.tags |
|
234 | 234 | |
|
235 | 235 | def test_tip_commit_in_tags(self): |
|
236 | 236 | tip = self.repo.get_commit() |
|
237 | 237 | assert self.repo.tags['tip'] == tip.raw_id |
|
238 | 238 | |
|
239 | 239 | def test_initial_commit(self): |
|
240 | 240 | init_commit = self.repo.get_commit(commit_idx=0) |
|
241 | 241 | init_author = init_commit.author |
|
242 | 242 | |
|
243 | 243 | assert init_commit.message == 'initial import' |
|
244 | 244 | assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>' |
|
245 | 245 | assert init_author == init_commit.committer |
|
246 | 246 | assert sorted(init_commit._file_paths) == sorted([ |
|
247 | 247 | 'vcs/__init__.py', |
|
248 | 248 | 'vcs/backends/BaseRepository.py', |
|
249 | 249 | 'vcs/backends/__init__.py', |
|
250 | 250 | ]) |
|
251 | 251 | assert sorted(init_commit._dir_paths) == sorted( |
|
252 | 252 | ['', 'vcs', 'vcs/backends']) |
|
253 | 253 | |
|
254 | 254 | assert init_commit._dir_paths + init_commit._file_paths == \ |
|
255 | 255 | init_commit._paths |
|
256 | 256 | |
|
257 | 257 | with pytest.raises(NodeDoesNotExistError): |
|
258 | 258 | init_commit.get_node(path='foobar') |
|
259 | 259 | |
|
260 | 260 | node = init_commit.get_node('vcs/') |
|
261 | 261 | assert hasattr(node, 'kind') |
|
262 | 262 | assert node.kind == NodeKind.DIR |
|
263 | 263 | |
|
264 | 264 | node = init_commit.get_node('vcs') |
|
265 | 265 | assert hasattr(node, 'kind') |
|
266 | 266 | assert node.kind == NodeKind.DIR |
|
267 | 267 | |
|
268 | 268 | node = init_commit.get_node('vcs/__init__.py') |
|
269 | 269 | assert hasattr(node, 'kind') |
|
270 | 270 | assert node.kind == NodeKind.FILE |
|
271 | 271 | |
|
272 | 272 | def test_not_existing_commit(self): |
|
273 | 273 | # rawid |
|
274 | 274 | with pytest.raises(RepositoryError): |
|
275 | 275 | self.repo.get_commit('abcd' * 10) |
|
276 | 276 | # shortid |
|
277 | 277 | with pytest.raises(RepositoryError): |
|
278 | 278 | self.repo.get_commit('erro' * 4) |
|
279 | 279 | # numeric |
|
280 | 280 | with pytest.raises(RepositoryError): |
|
281 | 281 | self.repo.get_commit(commit_idx=self.repo.count() + 1) |
|
282 | 282 | |
|
283 | 283 | # Small chance we ever get to this one |
|
284 | 284 | idx = pow(2, 30) |
|
285 | 285 | with pytest.raises(RepositoryError): |
|
286 | 286 | self.repo.get_commit(commit_idx=idx) |
|
287 | 287 | |
|
288 | 288 | def test_commit10(self): |
|
289 | 289 | commit10 = self.repo.get_commit(commit_idx=10) |
|
290 | 290 | README = """=== |
|
291 | 291 | VCS |
|
292 | 292 | === |
|
293 | 293 | |
|
294 | 294 | Various Version Control System management abstraction layer for Python. |
|
295 | 295 | |
|
296 | 296 | Introduction |
|
297 | 297 | ------------ |
|
298 | 298 | |
|
299 | 299 | TODO: To be written... |
|
300 | 300 | |
|
301 | 301 | """ |
|
302 | 302 | node = commit10.get_node('README.rst') |
|
303 | 303 | assert node.kind == NodeKind.FILE |
|
304 | 304 | assert node.content == README |
|
305 | 305 | |
|
306 | 306 | def test_local_clone(self): |
|
307 | 307 | clone_path = next(REPO_PATH_GENERATOR) |
|
308 | 308 | self.repo._local_clone(clone_path) |
|
309 | 309 | repo_clone = MercurialRepository(clone_path) |
|
310 | 310 | |
|
311 | 311 | assert self.repo.commit_ids == repo_clone.commit_ids |
|
312 | 312 | |
|
313 | 313 | def test_local_clone_fails_if_target_exists(self): |
|
314 | 314 | with pytest.raises(RepositoryError): |
|
315 | 315 | self.repo._local_clone(self.repo.path) |
|
316 | 316 | |
|
317 | 317 | def test_update(self): |
|
318 | 318 | repo_clone = self.get_clone_repo() |
|
319 | 319 | branches = repo_clone.branches |
|
320 | 320 | |
|
321 | 321 | repo_clone._update('default') |
|
322 | 322 | assert branches['default'] == repo_clone._identify() |
|
323 | 323 | repo_clone._update('stable') |
|
324 | 324 | assert branches['stable'] == repo_clone._identify() |
|
325 | 325 | |
|
326 | 326 | def test_local_pull_branch(self): |
|
327 | 327 | target_repo = self.get_empty_repo() |
|
328 | 328 | source_repo = self.get_clone_repo() |
|
329 | 329 | |
|
330 | 330 | default = Reference( |
|
331 | 331 | 'branch', 'default', source_repo.branches['default']) |
|
332 | 332 | target_repo._local_pull(source_repo.path, default) |
|
333 | 333 | target_repo = MercurialRepository(target_repo.path) |
|
334 | 334 | assert (target_repo.branches['default'] == |
|
335 | 335 | source_repo.branches['default']) |
|
336 | 336 | |
|
337 | 337 | stable = Reference('branch', 'stable', source_repo.branches['stable']) |
|
338 | 338 | target_repo._local_pull(source_repo.path, stable) |
|
339 | 339 | target_repo = MercurialRepository(target_repo.path) |
|
340 | 340 | assert target_repo.branches['stable'] == source_repo.branches['stable'] |
|
341 | 341 | |
|
342 | 342 | def test_local_pull_bookmark(self): |
|
343 | 343 | target_repo = self.get_empty_repo() |
|
344 | 344 | source_repo = self.get_clone_repo() |
|
345 | 345 | |
|
346 | 346 | commits = list(source_repo.get_commits(branch_name='default')) |
|
347 | 347 | foo1_id = commits[-5].raw_id |
|
348 | 348 | foo1 = Reference('book', 'foo1', foo1_id) |
|
349 | 349 | source_repo._update(foo1_id) |
|
350 | 350 | source_repo.bookmark('foo1') |
|
351 | 351 | |
|
352 | 352 | foo2_id = commits[-3].raw_id |
|
353 | 353 | foo2 = Reference('book', 'foo2', foo2_id) |
|
354 | 354 | source_repo._update(foo2_id) |
|
355 | 355 | source_repo.bookmark('foo2') |
|
356 | 356 | |
|
357 | 357 | target_repo._local_pull(source_repo.path, foo1) |
|
358 | 358 | target_repo = MercurialRepository(target_repo.path) |
|
359 | 359 | assert target_repo.branches['default'] == commits[-5].raw_id |
|
360 | 360 | |
|
361 | 361 | target_repo._local_pull(source_repo.path, foo2) |
|
362 | 362 | target_repo = MercurialRepository(target_repo.path) |
|
363 | 363 | assert target_repo.branches['default'] == commits[-3].raw_id |
|
364 | 364 | |
|
365 | 365 | def test_local_pull_commit(self): |
|
366 | 366 | target_repo = self.get_empty_repo() |
|
367 | 367 | source_repo = self.get_clone_repo() |
|
368 | 368 | |
|
369 | 369 | commits = list(source_repo.get_commits(branch_name='default')) |
|
370 | 370 | commit_id = commits[-5].raw_id |
|
371 | 371 | commit = Reference('rev', commit_id, commit_id) |
|
372 | 372 | target_repo._local_pull(source_repo.path, commit) |
|
373 | 373 | target_repo = MercurialRepository(target_repo.path) |
|
374 | 374 | assert target_repo.branches['default'] == commit_id |
|
375 | 375 | |
|
376 | 376 | commit_id = commits[-3].raw_id |
|
377 | 377 | commit = Reference('rev', commit_id, commit_id) |
|
378 | 378 | target_repo._local_pull(source_repo.path, commit) |
|
379 | 379 | target_repo = MercurialRepository(target_repo.path) |
|
380 | 380 | assert target_repo.branches['default'] == commit_id |
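
The `Reference` used throughout these pull tests is just a (type, name, commit_id) triple; an equivalent standalone definition matching how the tests construct it (the ids below are placeholders):

    import collections

    Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))

    # the three flavours these tests pull by:
    branch = Reference('branch', 'default', 'b986218ba1c9b0d6a259fac9b050b1724ed8e545')
    bookmark = Reference('book', 'foo1', 'a' * 40)
    commit = Reference('rev', 'a' * 40, 'a' * 40)   # name == commit id for raw revs
    assert branch.type == 'branch' and branch.name == 'default'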
|
381 | 381 | |
|
382 | 382 | def test_local_pull_from_same_repo(self): |
|
383 | 383 | reference = Reference('branch', 'default', None) |
|
384 | 384 | with pytest.raises(ValueError): |
|
385 | 385 | self.repo._local_pull(self.repo.path, reference) |
|
386 | 386 | |
|
387 | 387 | def test_validate_pull_reference_raises_on_missing_reference( |
|
388 | 388 | self, vcsbackend_hg): |
|
389 | 389 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
390 | 390 | reference = Reference( |
|
391 | 391 | 'book', 'invalid_reference', 'a' * 40) |
|
392 | 392 | |
|
393 | 393 | with pytest.raises(CommitDoesNotExistError): |
|
394 | 394 | target_repo._validate_pull_reference(reference) |
|
395 | 395 | |
|
396 | 396 | def test_heads(self): |
|
397 | 397 | assert set(self.repo._heads()) == set(self.repo.branches.values()) |
|
398 | 398 | |
|
399 | 399 | def test_ancestor(self): |
|
400 | 400 | commits = [ |
|
401 | 401 | c.raw_id for c in self.repo.get_commits(branch_name='default')] |
|
402 | 402 | assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5] |
|
403 | 403 | assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5] |
|
404 | 404 | |
|
405 | 405 | def test_local_push(self): |
|
406 | 406 | target_repo = self.get_empty_repo() |
|
407 | 407 | |
|
408 | 408 | revisions = list(self.repo.get_commits(branch_name='default')) |
|
409 | 409 | revision = revisions[-5].raw_id |
|
410 | 410 | self.repo._local_push(revision, target_repo.path) |
|
411 | 411 | |
|
412 | 412 | target_repo = MercurialRepository(target_repo.path) |
|
413 | 413 | |
|
414 | 414 | assert target_repo.branches['default'] == revision |
|
415 | 415 | |
|
416 | 416 | def test_hooks_can_be_enabled_for_local_push(self): |
|
417 | 417 | revision = 'deadbeef' |
|
418 | 418 | repo_path = 'test_group/test_repo' |
|
419 | 419 | with mock.patch.object(self.repo, '_remote') as remote_mock: |
|
420 | 420 | self.repo._local_push(revision, repo_path, enable_hooks=True) |
|
421 | 421 | remote_mock.push.assert_called_once_with( |
|
422 | 422 | [revision], repo_path, hooks=True, push_branches=False) |
|
423 | 423 | |
|
424 | 424 | def test_local_merge(self, vcsbackend_hg): |
|
425 | 425 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
426 | 426 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
427 | 427 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
428 | 428 | target_repo = MercurialRepository(target_repo.path) |
|
429 | 429 | target_rev = target_repo.branches['default'] |
|
430 | 430 | target_ref = Reference( |
|
431 | 431 | type='branch', name='default', commit_id=target_rev) |
|
432 | 432 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
433 | 433 | source_repo = MercurialRepository(source_repo.path) |
|
434 | 434 | source_rev = source_repo.branches['default'] |
|
435 | 435 | source_ref = Reference( |
|
436 | 436 | type='branch', name='default', commit_id=source_rev) |
|
437 | 437 | |
|
438 | 438 | target_repo._local_pull(source_repo.path, source_ref) |
|
439 | 439 | |
|
440 | 440 | merge_message = 'Merge message\n\nDescription:...' |
|
441 | 441 | user_name = 'Albert Einstein' |
|
442 | 442 | user_email = 'albert@einstein.com' |
|
443 | 443 | merge_commit_id, needs_push = target_repo._local_merge( |
|
444 | 444 | target_ref, merge_message, user_name, user_email, source_ref) |
|
445 | 445 | assert needs_push |
|
446 | 446 | |
|
447 | 447 | target_repo = MercurialRepository(target_repo.path) |
|
448 | 448 | assert target_repo.commit_ids[-3] == target_rev |
|
449 | 449 | assert target_repo.commit_ids[-2] == source_rev |
|
450 | 450 | last_commit = target_repo.get_commit(merge_commit_id) |
|
451 | 451 | assert last_commit.message.strip() == merge_message |
|
452 | 452 | assert last_commit.author == '%s <%s>' % (user_name, user_email) |
|
453 | 453 | |
|
454 | 454 | assert not os.path.exists( |
|
455 | 455 | os.path.join(target_repo.path, '.hg', 'merge', 'state')) |
|
456 | 456 | |
|
457 | 457 | def test_local_merge_source_is_fast_forward(self, vcsbackend_hg): |
|
458 | 458 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
459 | 459 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
460 | 460 | target_rev = target_repo.branches['default'] |
|
461 | 461 | target_ref = Reference( |
|
462 | 462 | type='branch', name='default', commit_id=target_rev) |
|
463 | 463 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
464 | 464 | source_repo = MercurialRepository(source_repo.path) |
|
465 | 465 | source_rev = source_repo.branches['default'] |
|
466 | 466 | source_ref = Reference( |
|
467 | 467 | type='branch', name='default', commit_id=source_rev) |
|
468 | 468 | |
|
469 | 469 | target_repo._local_pull(source_repo.path, source_ref) |
|
470 | 470 | |
|
471 | 471 | merge_message = 'Merge message\n\nDescription:...' |
|
472 | 472 | user_name = 'Albert Einstein' |
|
473 | 473 | user_email = 'albert@einstein.com' |
|
474 | 474 | merge_commit_id, needs_push = target_repo._local_merge( |
|
475 | 475 | target_ref, merge_message, user_name, user_email, source_ref) |
|
476 | 476 | assert merge_commit_id == source_rev |
|
477 | 477 | assert needs_push |
|
478 | 478 | |
|
479 | 479 | target_repo = MercurialRepository(target_repo.path) |
|
480 | 480 | assert target_repo.commit_ids[-2] == target_rev |
|
481 | 481 | assert target_repo.commit_ids[-1] == source_rev |
|
482 | 482 | |
|
483 | 483 | assert not os.path.exists( |
|
484 | 484 | os.path.join(target_repo.path, '.hg', 'merge', 'state')) |
|
485 | 485 | |
|
486 | 486 | def test_local_merge_source_is_integrated(self, vcsbackend_hg): |
|
487 | 487 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
488 | 488 | target_rev = target_repo.branches['default'] |
|
489 | 489 | target_ref = Reference( |
|
490 | 490 | type='branch', name='default', commit_id=target_rev) |
|
491 | 491 | |
|
492 | 492 | merge_message = 'Merge message\n\nDescription:...' |
|
493 | 493 | user_name = 'Albert Einstein' |
|
494 | 494 | user_email = 'albert@einstein.com' |
|
495 | 495 | merge_commit_id, needs_push = target_repo._local_merge( |
|
496 | 496 | target_ref, merge_message, user_name, user_email, target_ref) |
|
497 | 497 | assert merge_commit_id == target_rev |
|
498 | 498 | assert not needs_push |
|
499 | 499 | |
|
500 | 500 | target_repo = MercurialRepository(target_repo.path) |
|
501 | 501 | assert target_repo.commit_ids[-1] == target_rev |
|
502 | 502 | |
|
503 | 503 | assert not os.path.exists( |
|
504 | 504 | os.path.join(target_repo.path, '.hg', 'merge', 'state')) |
|
505 | 505 | |
|
506 | 506 | def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg): |
|
507 | 507 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
508 | 508 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
509 | 509 | vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1') |
|
510 | 510 | target_repo = MercurialRepository(target_repo.path) |
|
511 | 511 | target_rev = target_repo.branches['default'] |
|
512 | 512 | target_ref = Reference( |
|
513 | 513 | type='branch', name='default', commit_id=target_rev) |
|
514 | 514 | vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2') |
|
515 | 515 | source_repo = MercurialRepository(source_repo.path) |
|
516 | 516 | source_rev = source_repo.branches['default'] |
|
517 | 517 | source_ref = Reference( |
|
518 | 518 | type='branch', name='default', commit_id=source_rev) |
|
519 | 519 | |
|
520 | 520 | target_repo._local_pull(source_repo.path, source_ref) |
|
521 | 521 | with pytest.raises(RepositoryError): |
|
522 | 522 | target_repo._local_merge( |
|
523 | 523 | target_ref, 'merge_message', 'user name', 'user@name.com', |
|
524 | 524 | source_ref) |
|
525 | 525 | |
|
526 | 526 | # Check we are not left in an intermediate merge state |
|
527 | 527 | assert not os.path.exists( |
|
528 | 528 | os.path.join(target_repo.path, '.hg', 'merge', 'state')) |
|
529 | 529 | |
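The conflict test relies on the failed merge being rolled back cleanly. A sketch of that cleanup contract, using the real `hg update -C .` behavior of discarding the on-disk merge state:

```python
import os
import subprocess

def abort_merge(repo_path):
    """Discard an in-progress merge, leaving a clean working dir."""
    subprocess.check_call(['hg', '-R', repo_path, 'update', '-C', '.'])
    # a clean abort removes the merge state file the tests check for
    assert not os.path.exists(
        os.path.join(repo_path, '.hg', 'merge', 'state'))
```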
|
530 | 530 | def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg): |
|
531 | 531 | commits = [ |
|
532 | 532 | {'message': 'a'}, |
|
533 | 533 | {'message': 'b', 'branch': 'b'}, |
|
534 | 534 | ] |
|
535 | 535 | repo = backend_hg.create_repo(commits) |
|
536 | 536 | commit_ids = backend_hg.commit_ids |
|
537 | 537 | target_ref = Reference( |
|
538 | 538 | type='branch', name='default', commit_id=commit_ids['a']) |
|
539 | 539 | source_ref = Reference( |
|
540 | 540 | type='branch', name='b', commit_id=commit_ids['b']) |
|
541 | 541 | merge_message = 'Merge message\n\nDescription:...' |
|
542 | 542 | user_name = 'Albert Einstein' |
|
543 | 543 | user_email = 'albert@einstein.com' |
|
544 | 544 | vcs_repo = repo.scm_instance() |
|
545 | 545 | merge_commit_id, needs_push = vcs_repo._local_merge( |
|
546 | 546 | target_ref, merge_message, user_name, user_email, source_ref) |
|
547 | 547 | assert merge_commit_id != source_ref.commit_id |
|
548 | 548 | assert needs_push is True |
|
549 | 549 | commit = vcs_repo.get_commit(merge_commit_id) |
|
550 | 550 | assert commit.merge is True |
|
551 | 551 | assert commit.message == merge_message |
|
552 | 552 | |
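`Reference` is used throughout these tests as a plain `(type, name, commit_id)` triple. A stand-in definition, assuming the real class is a namedtuple with exactly those fields:

```python
from collections import namedtuple

# stand-in; assumption: the real Reference is a namedtuple with
# exactly these three fields
Reference = namedtuple('Reference', ['type', 'name', 'commit_id'])

ref = Reference(type='branch', name='default', commit_id='deadbeef')
assert ref.commit_id == 'deadbeef'
```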
|
553 | 553 | def test_maybe_prepare_merge_workspace(self): |
|
554 | 554 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
555 | 555 | 1, 'pr2', 'unused', 'unused2') |
|
556 | 556 | |
|
557 | 557 | assert os.path.isdir(workspace) |
|
558 | 558 | workspace_repo = MercurialRepository(workspace) |
|
559 | 559 | assert workspace_repo.branches == self.repo.branches |
|
560 | 560 | |
|
561 | 561 | # Calling it a second time should also succeed |
|
562 | 562 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
563 | 563 | 1, 'pr2', 'unused', 'unused2') |
|
564 | 564 | assert os.path.isdir(workspace) |
|
565 | 565 | |
|
566 | 566 | def test_cleanup_merge_workspace(self): |
|
567 | 567 | workspace = self.repo._maybe_prepare_merge_workspace( |
|
568 | 568 | 1, 'pr3', 'unused', 'unused2') |
|
569 | 569 | |
|
570 | 570 | assert os.path.isdir(workspace) |
|
571 | 571 | self.repo.cleanup_merge_workspace(1, 'pr3') |
|
572 | 572 | |
|
573 | 573 | assert not os.path.exists(workspace) |
|
574 | 574 | |
|
575 | 575 | def test_cleanup_merge_workspace_invalid_workspace_id(self): |
|
576 | 576 | # No assert: even in the case of a nonexistent workspace this function
|
577 | 577 | # should still succeed. |
|
578 | 578 | self.repo.cleanup_merge_workspace(1, 'pr4') |
|
579 | 579 | |
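The invalid-workspace test pins down that cleanup is idempotent. A minimal sketch of such a contract (the real helper's path layout is internal to RhodeCode):

```python
import shutil

def cleanup_workspace(workspace_path):
    """Remove a merge workspace; a no-op if it is already gone."""
    shutil.rmtree(workspace_path, ignore_errors=True)
```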
|
580 | 580 | def test_merge_target_is_bookmark(self, vcsbackend_hg): |
|
581 | 581 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
582 | 582 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
583 | 583 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
584 | 584 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
585 | 585 | imc = source_repo.in_memory_commit |
|
586 | 586 | imc.add(FileNode('file_x', content=source_repo.name)) |
|
587 | 587 | imc.commit( |
|
588 | 588 | message=u'Automatic commit from repo merge test', |
|
589 | 589 | author=u'Automatic') |
|
590 | 590 | target_commit = target_repo.get_commit() |
|
591 | 591 | source_commit = source_repo.get_commit() |
|
592 | 592 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
|
593 | 593 | bookmark_name = 'bookmark' |
|
594 | 594 | target_repo._update(default_branch) |
|
595 | 595 | target_repo.bookmark(bookmark_name) |
|
596 | 596 | target_ref = Reference('book', bookmark_name, target_commit.raw_id) |
|
597 | 597 | source_ref = Reference('branch', default_branch, source_commit.raw_id) |
|
598 | 598 | workspace_id = 'test-merge' |
|
599 | 599 | repo_id = repo_id_generator(target_repo.path) |
|
600 | 600 | merge_response = target_repo.merge( |
|
601 | 601 | repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
602 | 602 | 'test user', 'test@rhodecode.com', 'merge message 1', |
|
603 | 603 | dry_run=False) |
|
604 | 604 | expected_merge_response = MergeResponse( |
|
605 | 605 | True, True, merge_response.merge_ref, |
|
606 | 606 | MergeFailureReason.NONE) |
|
607 | 607 | assert merge_response == expected_merge_response |
|
608 | 608 | |
|
609 | 609 | target_repo = backends.get_backend(vcsbackend_hg.alias)( |
|
610 | 610 | target_repo.path) |
|
611 | 611 | target_commits = list(target_repo.get_commits()) |
|
612 | 612 | commit_ids = [c.raw_id for c in target_commits[:-1]] |
|
613 | 613 | assert source_ref.commit_id in commit_ids |
|
614 | 614 | assert target_ref.commit_id in commit_ids |
|
615 | 615 | |
|
616 | 616 | merge_commit = target_commits[-1] |
|
617 | 617 | assert merge_commit.raw_id == merge_response.merge_ref.commit_id |
|
618 | 618 | assert merge_commit.message.strip() == 'merge message 1' |
|
619 | 619 | assert merge_commit.author == 'test user <test@rhodecode.com>' |
|
620 | 620 | |
|
621 | 621 | # Check the bookmark was updated in the target repo |
|
622 | 622 | assert ( |
|
623 | 623 | target_repo.bookmarks[bookmark_name] == |
|
624 | 624 | merge_response.merge_ref.commit_id) |
|
625 | 625 | |
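The final assertion reads `target_repo.bookmarks` to confirm the bookmark moved to the merge commit. Outside the test suite, the same lookup can be done with the real `bookmark()` revset (a sketch, not RhodeCode's accessor):

```python
import subprocess

def bookmark_target(repo_path, name):
    """Return the full node a bookmark points at."""
    out = subprocess.check_output([
        'hg', '-R', repo_path, 'log', '-r',
        'bookmark("%s")' % name, '-T', '{node}'])
    return out.decode().strip()
```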
|
626 | 626 | def test_merge_source_is_bookmark(self, vcsbackend_hg): |
|
627 | 627 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
628 | 628 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
629 | 629 | imc = source_repo.in_memory_commit |
|
630 | 630 | imc.add(FileNode('file_x', content=source_repo.name)) |
|
631 | 631 | imc.commit( |
|
632 | 632 | message=u'Automatic commit from repo merge test', |
|
633 | 633 | author=u'Automatic') |
|
634 | 634 | target_commit = target_repo.get_commit() |
|
635 | 635 | source_commit = source_repo.get_commit() |
|
636 | 636 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
|
637 | 637 | bookmark_name = 'bookmark' |
|
638 | 638 | target_ref = Reference('branch', default_branch, target_commit.raw_id) |
|
639 | 639 | source_repo._update(default_branch) |
|
640 | 640 | source_repo.bookmark(bookmark_name) |
|
641 | 641 | source_ref = Reference('book', bookmark_name, source_commit.raw_id) |
|
642 | 642 | workspace_id = 'test-merge' |
|
643 | 643 | repo_id = repo_id_generator(target_repo.path) |
|
644 | 644 | merge_response = target_repo.merge( |
|
645 | 645 | repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
646 | 646 | 'test user', 'test@rhodecode.com', 'merge message 1', |
|
647 | 647 | dry_run=False) |
|
648 | 648 | expected_merge_response = MergeResponse( |
|
649 | 649 | True, True, merge_response.merge_ref, |
|
650 | 650 | MergeFailureReason.NONE) |
|
651 | 651 | assert merge_response == expected_merge_response |
|
652 | 652 | |
|
653 | 653 | target_repo = backends.get_backend(vcsbackend_hg.alias)( |
|
654 | 654 | target_repo.path) |
|
655 | 655 | target_commits = list(target_repo.get_commits()) |
|
656 | 656 | commit_ids = [c.raw_id for c in target_commits] |
|
657 | 657 | assert source_ref.commit_id == commit_ids[-1] |
|
658 | 658 | assert target_ref.commit_id == commit_ids[-2] |
|
659 | 659 | |
|
660 | 660 | def test_merge_target_has_multiple_heads(self, vcsbackend_hg): |
|
661 | 661 | target_repo = vcsbackend_hg.create_repo(number_of_commits=2) |
|
662 | 662 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
663 | 663 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
664 | 664 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
665 | 665 | |
|
666 | 666 | # add an extra head to the target repo |
|
667 | 667 | imc = target_repo.in_memory_commit |
|
668 | 668 | imc.add(FileNode('file_x', content='foo')) |
|
669 | 669 | commits = list(target_repo.get_commits()) |
|
670 | 670 | imc.commit( |
|
671 | 671 | message=u'Automatic commit from repo merge test', |
|
672 | 672 | author=u'Automatic', parents=commits[0:1]) |
|
673 | 673 | |
|
674 | 674 | target_commit = target_repo.get_commit() |
|
675 | 675 | source_commit = source_repo.get_commit() |
|
676 | 676 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
|
677 | 677 | target_repo._update(default_branch) |
|
678 | 678 | |
|
679 | 679 | target_ref = Reference('branch', default_branch, target_commit.raw_id) |
|
680 | 680 | source_ref = Reference('branch', default_branch, source_commit.raw_id) |
|
681 | 681 | workspace_id = 'test-merge' |
|
682 | 682 | |
|
683 | 683 | assert len(target_repo._heads(branch='default')) == 2 |
|
684 | 684 | expected_merge_response = MergeResponse( |
|
685 | 685 | False, False, None, |
|
686 | 686 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS) |
|
687 | 687 | repo_id = repo_id_generator(target_repo.path) |
|
688 | 688 | merge_response = target_repo.merge( |
|
689 | 689 | repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
690 | 690 | 'test user', 'test@rhodecode.com', 'merge message 1', |
|
691 | 691 | dry_run=False) |
|
692 | 692 | assert merge_response == expected_merge_response |
|
693 | 693 | |
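The multiple-heads guard counts heads through the internal `_heads()` helper. The equivalent revset query, as a sketch:

```python
import subprocess

def branch_heads(repo_path, branch='default'):
    """List the head nodes of a named branch."""
    out = subprocess.check_output([
        'hg', '-R', repo_path, 'log', '-r',
        'head() and branch("%s")' % branch, '-T', '{node}\n'])
    return out.decode().split()

# the test's precondition would then read: len(branch_heads(path)) == 2
```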
|
694 | 694 | def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg): |
|
695 | 695 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
696 | 696 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
697 | 697 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
698 | 698 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
699 | 699 | imc = source_repo.in_memory_commit |
|
700 | 700 | imc.add(FileNode('file_x', content=source_repo.name)) |
|
701 | 701 | imc.commit( |
|
702 | 702 | message=u'Automatic commit from repo merge test', |
|
703 | 703 | author=u'Automatic') |
|
704 | 704 | target_commit = target_repo.get_commit() |
|
705 | 705 | source_commit = source_repo.get_commit() |
|
706 | 706 | |
|
707 | 707 | vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info') |
|
708 | 708 | |
|
709 | 709 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
|
710 | 710 | bookmark_name = 'bookmark' |
|
711 | 711 | source_repo._update(default_branch) |
|
712 | 712 | source_repo.bookmark(bookmark_name) |
|
713 | 713 | |
|
714 | 714 | target_ref = Reference('branch', default_branch, target_commit.raw_id) |
|
715 | 715 | source_ref = Reference('book', bookmark_name, source_commit.raw_id) |
|
716 | 716 | repo_id = repo_id_generator(target_repo.path) |
|
717 | 717 | workspace_id = 'test-merge' |
|
718 | 718 | |
|
719 | 719 | merge_response = target_repo.merge( |
|
720 | 720 | repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
721 | 721 | 'test user', 'test@rhodecode.com', 'merge message 1', |
|
722 | 722 | dry_run=False, use_rebase=True) |
|
723 | 723 | |
|
724 | 724 | expected_merge_response = MergeResponse( |
|
725 | 725 | True, True, merge_response.merge_ref, |
|
726 | 726 | MergeFailureReason.NONE) |
|
727 | 727 | assert merge_response == expected_merge_response |
|
728 | 728 | |
|
729 | 729 | target_repo = backends.get_backend(vcsbackend_hg.alias)( |
|
730 | 730 | target_repo.path) |
|
731 | 731 | last_commit = target_repo.get_commit() |
|
732 | 732 | assert last_commit.message == source_commit.message |
|
733 | 733 | assert last_commit.author == source_commit.author |
|
734 | 734 | # This checks that we effectively did a rebase |
|
735 | 735 | assert last_commit.raw_id != source_commit.raw_id |
|
736 | 736 | |
|
737 | 737 | # Check the target has only 4 commits: 2 were already in target and |
|
738 | 738 | # only two should have been added |
|
739 | 739 | assert len(target_repo.commit_ids) == 2 + 2 |
|
740 | 740 | |
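The rebase test checks that the commit hash changed while the message and author did not, because rebasing re-creates the source commits on top of the target instead of adding a merge commit. With plain Mercurial this is the bundled `rebase` extension:

```python
import subprocess

def rebase_onto(repo_path, source_rev, dest_rev):
    """Replay source_rev and its descendants on top of dest_rev."""
    subprocess.check_call([
        'hg', '-R', repo_path, '--config', 'extensions.rebase=',
        'rebase', '-s', source_rev, '-d', dest_rev])
```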
|
741 | 741 | |
|
742 | 742 | class TestGetShadowInstance(object): |
|
743 | 743 | |
|
744 | 744 | @pytest.fixture |
|
745 | 745 | def repo(self, vcsbackend_hg, monkeypatch): |
|
746 | 746 | repo = vcsbackend_hg.repo |
|
747 | 747 | monkeypatch.setattr(repo, 'config', mock.Mock()) |
|
748 | 748 | monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock()) |
|
749 | 749 | return repo |
|
750 | 750 | |
|
751 | 751 | def test_passes_config(self, repo): |
|
752 | 752 | shadow = repo._get_shadow_instance(repo.path) |
|
753 | 753 | assert shadow.config == repo.config.copy() |
|
754 | 754 | |
|
755 | 755 | def test_disables_hooks(self, repo): |
|
756 | 756 | shadow = repo._get_shadow_instance(repo.path) |
|
757 | 757 | shadow.config.clear_section.assert_called_once_with('hooks') |
|
758 | 758 | |
|
759 | 759 | def test_allows_to_keep_hooks(self, repo): |
|
760 | 760 | shadow = repo._get_shadow_instance(repo.path, enable_hooks=True) |
|
761 | 761 | assert not shadow.config.clear_section.called |
|
762 | 762 | |
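In miniature, the shadow-instance tests assert that the copied config has its `hooks` section cleared unless `enable_hooks` is set. A sketch of that contract with a plain dict-of-dicts standing in for RhodeCode's config class:

```python
# `config` here is a plain dict-of-dicts stand-in, not RhodeCode's
# config class; only the clear-the-hooks-section contract is shown.
def make_shadow_config(config, enable_hooks=False):
    shadow = dict((section, dict(values))
                  for section, values in config.items())
    if not enable_hooks:
        shadow['hooks'] = {}
    return shadow

cfg = {'hooks': {'pretxnchangegroup': 'python:...'}, 'ui': {}}
assert make_shadow_config(cfg)['hooks'] == {}
assert make_shadow_config(cfg, enable_hooks=True)['hooks'] != {}
```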
|
763 | 763 | |
|
764 | 764 | class TestMercurialCommit(object): |
|
765 | 765 | |
|
766 | 766 | def _test_equality(self, commit): |
|
767 | 767 | idx = commit.idx |
|
768 | 768 | assert commit == self.repo.get_commit(commit_idx=idx) |
|
769 | 769 | |
|
770 | 770 | def test_equality(self): |
|
771 | 771 | indexes = [0, 10, 20] |
|
772 | 772 | commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes] |
|
773 | 773 | for commit in commits: |
|
774 | 774 | self._test_equality(commit) |
|
775 | 775 | |
|
776 | 776 | def test_default_commit(self): |
|
777 | 777 | tip = self.repo.get_commit('tip') |
|
778 | 778 | assert tip == self.repo.get_commit() |
|
779 | 779 | assert tip == self.repo.get_commit(commit_id=None) |
|
780 | 780 | assert tip == self.repo.get_commit(commit_idx=None) |
|
781 | 781 | assert tip == list(self.repo[-1:])[0] |
|
782 | 782 | |
|
783 | 783 | def test_root_node(self): |
|
784 | 784 | tip = self.repo.get_commit('tip') |
|
785 | 785 | assert tip.root is tip.get_node('') |
|
786 | 786 | |
|
787 | 787 | def test_lazy_fetch(self): |
|
788 | 788 | """ |
|
789 | 789 | Test that a commit's nodes expand and are cached as we walk

790 | 790 | through the commit. The order of operations is key here; this

791 | 791 | test was written by running command after command in a shell.
|
792 | 792 | """ |
|
793 | 793 | commit = self.repo.get_commit(commit_idx=45) |
|
794 | 794 | assert len(commit.nodes) == 0 |
|
795 | 795 | root = commit.root |
|
796 | 796 | assert len(commit.nodes) == 1 |
|
797 | 797 | assert len(root.nodes) == 8 |
|
798 | 798 | # accessing root.nodes updates commit.nodes |
|
799 | 799 | assert len(commit.nodes) == 9 |
|
800 | 800 | |
|
801 | 801 | docs = root.get_node('docs') |
|
802 | 802 | # we haven't yet accessed anything new as docs dir was already cached |
|
803 | 803 | assert len(commit.nodes) == 9 |
|
804 | 804 | assert len(docs.nodes) == 8 |
|
805 | 805 | # accessing docs.nodes updates commit.nodes |
|
806 | 806 | assert len(commit.nodes) == 17 |
|
807 | 807 | |
|
808 | 808 | assert docs is commit.get_node('docs') |
|
809 | 809 | assert docs is root.nodes[0] |
|
810 | 810 | assert docs is root.dirs[0] |
|
811 | 811 | assert docs is commit.get_node('docs') |
|
812 | 812 | |
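The lazy-fetch behavior boils down to a per-commit node cache: lookups populate `commit.nodes`, and repeated `get_node()` calls return the identical object. A reduced sketch of the pattern:

```python
class LazyCommit(object):
    """Per-commit node cache: repeated lookups return the same object."""

    def __init__(self, loader):
        self._loader = loader   # callable mapping a path to a node
        self.nodes = {}

    def get_node(self, path):
        if path not in self.nodes:
            self.nodes[path] = self._loader(path)
        return self.nodes[path]

commit = LazyCommit(loader=lambda path: object())
assert commit.get_node('docs') is commit.get_node('docs')
```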
|
813 | 813 | def test_nodes_with_commit(self): |
|
814 | 814 | commit = self.repo.get_commit(commit_idx=45) |
|
815 | 815 | root = commit.root |
|
816 | 816 | docs = root.get_node('docs') |
|
817 | 817 | assert docs is commit.get_node('docs') |
|
818 | 818 | api = docs.get_node('api') |
|
819 | 819 | assert api is commit.get_node('docs/api') |
|
820 | 820 | index = api.get_node('index.rst') |
|
821 | 821 | assert index is commit.get_node('docs/api/index.rst') |
|
822 | 822 | assert index is commit.get_node( |
|
823 | 823 | 'docs').get_node('api').get_node('index.rst') |
|
824 | 824 | |
|
825 | 825 | def test_branch_and_tags(self): |
|
826 | 826 | commit0 = self.repo.get_commit(commit_idx=0) |
|
827 | 827 | assert commit0.branch == 'default' |
|
828 | 828 | assert commit0.tags == [] |
|
829 | 829 | |
|
830 | 830 | commit10 = self.repo.get_commit(commit_idx=10) |
|
831 | 831 | assert commit10.branch == 'default' |
|
832 | 832 | assert commit10.tags == [] |
|
833 | 833 | |
|
834 | 834 | commit44 = self.repo.get_commit(commit_idx=44) |
|
835 | 835 | assert commit44.branch == 'web' |
|
836 | 836 | |
|
837 | 837 | tip = self.repo.get_commit('tip') |
|
838 | 838 | assert 'tip' in tip.tags |
|
839 | 839 | |
|
840 | 840 | def test_bookmarks(self): |
|
841 | 841 | commit0 = self.repo.get_commit(commit_idx=0) |
|
842 | 842 | assert commit0.bookmarks == [] |
|
843 | 843 | |
|
844 | 844 | def _test_file_size(self, idx, path, size): |
|
845 | 845 | node = self.repo.get_commit(commit_idx=idx).get_node(path) |
|
846 | 846 | assert node.is_file() |
|
847 | 847 | assert node.size == size |
|
848 | 848 | |
|
849 | 849 | def test_file_size(self): |
|
850 | 850 | to_check = ( |
|
851 | 851 | (10, 'setup.py', 1068), |
|
852 | 852 | (20, 'setup.py', 1106), |
|
853 | 853 | (60, 'setup.py', 1074), |
|
854 | 854 | |
|
855 | 855 | (10, 'vcs/backends/base.py', 2921), |
|
856 | 856 | (20, 'vcs/backends/base.py', 3936), |
|
857 | 857 | (60, 'vcs/backends/base.py', 6189), |
|
858 | 858 | ) |
|
859 | 859 | for idx, path, size in to_check: |
|
860 | 860 | self._test_file_size(idx, path, size) |
|
861 | 861 | |
|
862 | 862 | def test_file_history_from_commits(self): |
|
863 | 863 | node = self.repo[10].get_node('setup.py') |
|
864 | 864 | commit_ids = [commit.raw_id for commit in node.history] |
|
865 | 865 | assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids |
|
866 | 866 | |
|
867 | 867 | node = self.repo[20].get_node('setup.py') |
|
868 | 868 | node_ids = [commit.raw_id for commit in node.history] |
|
869 | 869 | assert ['eada5a770da98ab0dd7325e29d00e0714f228d09', |
|
870 | 870 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids |
|
871 | 871 | |
|
872 | 872 | # special case: we check history from a commit that changed this

873 | 873 | # particular file, to verify that the commit itself is included as well
|
874 | 874 | node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\ |
|
875 | 875 | .get_node('setup.py') |
|
876 | 876 | node_ids = [commit.raw_id for commit in node.history] |
|
877 | 877 | assert ['eada5a770da98ab0dd7325e29d00e0714f228d09', |
|
878 | 878 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids |
|
879 | 879 | |
|
880 | 880 | def test_file_history(self): |
|
881 | 881 | # we can only check that those commits are present in the history,

882 | 882 | # as we cannot update this test every time the file is changed
|
883 | 883 | files = { |
|
884 | 884 | 'setup.py': [7, 18, 45, 46, 47, 69, 77], |
|
885 | 885 | 'vcs/nodes.py': [ |
|
886 | 886 | 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76], |
|
887 | 887 | 'vcs/backends/hg.py': [ |
|
888 | 888 | 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30, |
|
889 | 889 | 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54, |
|
890 | 890 | 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82], |
|
891 | 891 | } |
|
892 | 892 | for path, indexes in files.items(): |
|
893 | 893 | tip = self.repo.get_commit(commit_idx=indexes[-1]) |
|
894 | 894 | node = tip.get_node(path) |
|
895 | 895 | node_indexes = [commit.idx for commit in node.history] |
|
896 | 896 | assert set(indexes).issubset(set(node_indexes)), ( |
|
897 | 897 | "We assumed that %s is subset of commits for which file %s " |
|
898 | 898 | "has been changed, and history of that node returned: %s" |
|
899 | 899 | % (indexes, path, node_indexes)) |
|
900 | 900 | |
|
901 | 901 | def test_file_annotate(self): |
|
902 | 902 | files = { |
|
903 | 903 | 'vcs/backends/__init__.py': { |
|
904 | 904 | 89: { |
|
905 | 905 | 'lines_no': 31, |
|
906 | 906 | 'commits': [ |
|
907 | 907 | 32, 32, 61, 32, 32, 37, 32, 32, 32, 44, |
|
908 | 908 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, |
|
909 | 909 | 32, 32, 32, 32, 37, 32, 37, 37, 32, |
|
910 | 910 | 32, 32 |
|
911 | 911 | ] |
|
912 | 912 | }, |
|
913 | 913 | 20: { |
|
914 | 914 | 'lines_no': 1, |
|
915 | 915 | 'commits': [4] |
|
916 | 916 | }, |
|
917 | 917 | 55: { |
|
918 | 918 | 'lines_no': 31, |
|
919 | 919 | 'commits': [ |
|
920 | 920 | 32, 32, 45, 32, 32, 37, 32, 32, 32, 44, |
|
921 | 921 | 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, |
|
922 | 922 | 32, 32, 32, 32, 37, 32, 37, 37, 32, |
|
923 | 923 | 32, 32 |
|
924 | 924 | ] |
|
925 | 925 | } |
|
926 | 926 | }, |
|
927 | 927 | 'vcs/exceptions.py': { |
|
928 | 928 | 89: { |
|
929 | 929 | 'lines_no': 18, |
|
930 | 930 | 'commits': [ |
|
931 | 931 | 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, |
|
932 | 932 | 16, 16, 17, 16, 16, 18, 18, 18 |
|
933 | 933 | ] |
|
934 | 934 | }, |
|
935 | 935 | 20: { |
|
936 | 936 | 'lines_no': 18, |
|
937 | 937 | 'commits': [ |
|
938 | 938 | 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, |
|
939 | 939 | 16, 16, 17, 16, 16, 18, 18, 18 |
|
940 | 940 | ] |
|
941 | 941 | }, |
|
942 | 942 | 55: { |
|
943 | 943 | 'lines_no': 18, |
|
944 | 944 | 'commits': [ |
|
945 | 945 | 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, |
|
946 | 946 | 17, 16, 16, 18, 18, 18 |
|
947 | 947 | ] |
|
948 | 948 | } |
|
949 | 949 | }, |
|
950 | 950 | 'MANIFEST.in': { |
|
951 | 951 | 89: { |
|
952 | 952 | 'lines_no': 5, |
|
953 | 953 | 'commits': [7, 7, 7, 71, 71] |
|
954 | 954 | }, |
|
955 | 955 | 20: { |
|
956 | 956 | 'lines_no': 3, |
|
957 | 957 | 'commits': [7, 7, 7] |
|
958 | 958 | }, |
|
959 | 959 | 55: { |
|
960 | 960 | 'lines_no': 3, |
|
961 | 961 | 'commits': [7, 7, 7] |
|
962 | 962 | } |
|
963 | 963 | } |
|
964 | 964 | } |
|
965 | 965 | |
|
966 | 966 | for fname, commit_dict in files.items(): |
|
967 | 967 | for idx, __ in commit_dict.items(): |
|
968 | 968 | commit = self.repo.get_commit(commit_idx=idx) |
|
969 | 969 | l1_1 = [x[1] for x in commit.get_file_annotate(fname)] |
|
970 | 970 | l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)] |
|
971 | 971 | assert l1_1 == l1_2 |
|
972 | 972 | l1 = [

973 | 973 | x[2]().idx for x in commit.get_file_annotate(fname)]
|
974 | 974 | l2 = files[fname][idx]['commits'] |
|
975 | 975 | assert l1 == l2, (

976 | 976 | "The lists of commits for %s@commit_id %s "

977 | 977 | "from the annotation list should match each other, "

978 | 978 | "got \n%s \nvs \n%s" % (fname, idx, l1, l2))
|
979 | 979 | |
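`get_file_annotate()` yields tuples shaped like `(line_no, commit_id, loader)`, where the loader is a zero-argument callable returning the commit lazily; that is why the test compares `x[1]` with `x[2]().raw_id`. A self-contained mock of that shape:

```python
class FakeCommit(object):
    def __init__(self, raw_id, idx):
        self.raw_id = raw_id
        self.idx = idx

def fake_annotate(commit):
    # one (line_no, commit_id, loader) tuple per annotated line
    return [(1, commit.raw_id, lambda: commit),
            (2, commit.raw_id, lambda: commit)]

c = FakeCommit('deadbeef', 7)
annotations = fake_annotate(c)
assert [x[1] for x in annotations] == \
    [x[2]().raw_id for x in annotations]
```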
|
980 | 980 | def test_commit_state(self): |
|
981 | 981 | """ |
|
982 | 982 | Tests which files have been added/changed/removed at a particular commit
|
983 | 983 | """ |
|
984 | 984 | |
|
985 | 985 | # commit_id 46ad32a4f974: |
|
986 | 986 | # hg st --rev 46ad32a4f974 |
|
987 | 987 | # changed: 13 |
|
988 | 988 | # added: 20 |
|
989 | 989 | # removed: 1 |
|
990 | 990 | changed = set([ |
|
991 | 991 | '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst', |
|
992 | 992 | 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py', |
|
993 | 993 | 'vcs/__init__.py', 'vcs/backends/__init__.py', |
|
994 | 994 | 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py', |
|
995 | 995 | 'vcs/utils/__init__.py']) |
|
996 | 996 | |
|
997 | 997 | added = set([ |
|
998 | 998 | 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst', |
|
999 | 999 | 'docs/api/index.rst', 'docs/api/nodes.rst', |
|
1000 | 1000 | 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst', |
|
1001 | 1001 | 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg', |
|
1002 | 1002 | 'vcs/utils/baseui_config.py', 'vcs/utils/web.py', |
|
1003 | 1003 | 'vcs/web/__init__.py', 'vcs/web/exceptions.py', |
|
1004 | 1004 | 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py', |
|
1005 | 1005 | 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py', |
|
1006 | 1006 | 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py', |
|
1007 | 1007 | 'vcs/web/simplevcs/views.py']) |
|
1008 | 1008 | |
|
1009 | 1009 | removed = set(['docs/api.rst']) |
|
1010 | 1010 | |
|
1011 | 1011 | commit64 = self.repo.get_commit('46ad32a4f974') |
|
1012 | 1012 | assert set((node.path for node in commit64.added)) == added |
|
1013 | 1013 | assert set((node.path for node in commit64.changed)) == changed |
|
1014 | 1014 | assert set((node.path for node in commit64.removed)) == removed |
|
1015 | 1015 | |
|
1016 | 1016 | # commit_id b090f22d27d6: |
|
1017 | 1017 | # hg st --rev b090f22d27d6 |
|
1018 | 1018 | # changed: 1

1019 | 1019 | # added: 0

1020 | 1020 | # removed: 0
|
1021 | 1021 | commit88 = self.repo.get_commit('b090f22d27d6') |
|
1022 | 1022 | assert set((node.path for node in commit88.added)) == set() |
|
1023 | 1023 | assert set((node.path for node in commit88.changed)) == \ |
|
1024 | 1024 | set(['.hgignore']) |
|
1025 | 1025 | assert set((node.path for node in commit88.removed)) == set() |
|
1026 | 1026 | |
|
1027 | 1027 | # |
|
1028 | 1028 | # commit_idx 85:
|
1029 | 1029 | # added: 2 [ |
|
1030 | 1030 | # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py'] |
|
1031 | 1031 | # changed: 4 ['vcs/web/simplevcs/models.py', ...] |
|
1032 | 1032 | # removed: 1 ['vcs/utils/web.py'] |
|
1033 | 1033 | commit85 = self.repo.get_commit(commit_idx=85) |
|
1034 | 1034 | assert set((node.path for node in commit85.added)) == set([ |
|
1035 | 1035 | 'vcs/utils/diffs.py', |
|
1036 | 1036 | 'vcs/web/simplevcs/views/diffs.py']) |
|
1037 | 1037 | assert set((node.path for node in commit85.changed)) == set([ |
|
1038 | 1038 | 'vcs/web/simplevcs/models.py', |
|
1039 | 1039 | 'vcs/web/simplevcs/utils.py', |
|
1040 | 1040 | 'vcs/web/simplevcs/views/__init__.py', |
|
1041 | 1041 | 'vcs/web/simplevcs/views/repository.py', |
|
1042 | 1042 | ]) |
|
1043 | 1043 | assert set((node.path for node in commit85.removed)) == \ |
|
1044 | 1044 | set(['vcs/utils/web.py']) |
|
1045 | 1045 | |
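The added/changed/removed sets above correspond to what `hg status --change REV` reports for a single commit (codes `A`, `M`, `R`). A sketch of deriving the same classification from the CLI:

```python
import subprocess

def commit_state(repo_path, rev):
    """Return (changed, added, removed) path sets for one commit."""
    out = subprocess.check_output(
        ['hg', '-R', repo_path, 'status', '--change', rev])
    buckets = {'M': set(), 'A': set(), 'R': set()}
    for line in out.decode().splitlines():
        code, path = line[0], line[2:]   # lines look like "M some/path"
        buckets.setdefault(code, set()).add(path)
    return buckets['M'], buckets['A'], buckets['R']
```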
|
1046 | 1046 | def test_files_state(self): |
|
1047 | 1047 | """ |
|
1048 | 1048 | Tests state of FileNodes. |
|
1049 | 1049 | """ |
|
1050 | 1050 | commit = self.repo.get_commit(commit_idx=85) |
|
1051 | 1051 | node = commit.get_node('vcs/utils/diffs.py') |
|
1052 | 1052 | assert node.state == NodeState.ADDED
|
1053 | 1053 | assert node.added |
|
1054 | 1054 | assert not node.changed |
|
1055 | 1055 | assert not node.not_changed |
|
1056 | 1056 | assert not node.removed |
|
1057 | 1057 | |
|
1058 | 1058 | commit = self.repo.get_commit(commit_idx=88) |
|
1059 | 1059 | node = commit.get_node('.hgignore') |
|
1060 | 1060 | assert node.state == NodeState.CHANGED
|
1061 | 1061 | assert not node.added |
|
1062 | 1062 | assert node.changed |
|
1063 | 1063 | assert not node.not_changed |
|
1064 | 1064 | assert not node.removed |
|
1065 | 1065 | |
|
1066 | 1066 | commit = self.repo.get_commit(commit_idx=85) |
|
1067 | 1067 | node = commit.get_node('setup.py') |
|
1068 | 1068 | assert node.state == NodeState.NOT_CHANGED
|
1069 | 1069 | assert not node.added |
|
1070 | 1070 | assert not node.changed |
|
1071 | 1071 | assert node.not_changed |
|
1072 | 1072 | assert not node.removed |
|
1073 | 1073 | |
|
1074 | 1074 | # If a node has REMOVED state, trying to fetch it raises a

1075 | 1075 | # NodeDoesNotExistError exception
|
1076 | 1076 | commit = self.repo.get_commit(commit_idx=2) |
|
1077 | 1077 | path = 'vcs/backends/BaseRepository.py' |
|
1078 | 1078 | with pytest.raises(NodeDoesNotExistError): |
|
1079 | 1079 | commit.get_node(path) |
|
1080 | 1080 | # but it is still listed in the commit's ``removed`` attribute
|
1081 | 1081 | assert path in [rf.path for rf in commit.removed] |
|
1082 | 1082 | |
|
1083 | 1083 | def test_commit_message_is_unicode(self): |
|
1084 | 1084 | for cm in self.repo: |
|
1085 | 1085 | assert type(cm.message) == unicode |
|
1086 | 1086 | |
|
1087 | 1087 | def test_commit_author_is_unicode(self): |
|
1088 | 1088 | for cm in self.repo: |
|
1089 | 1089 | assert type(cm.author) == unicode |
|
1090 | 1090 | |
|
1091 | 1091 | def test_repo_files_content_is_unicode(self): |
|
1092 | 1092 | test_commit = self.repo.get_commit(commit_idx=100) |
|
1093 | 1093 | for node in test_commit.get_node('/'): |
|
1094 | 1094 | if node.is_file(): |
|
1095 | 1095 | assert type(node.content) == unicode |
|
1096 | 1096 | |
|
1097 | 1097 | def test_wrong_path(self): |
|
1098 | 1098 | # 'setup.py' exists in the root dir, but not at this path:
|
1099 | 1099 | path = 'foo/bar/setup.py' |
|
1100 | 1100 | with pytest.raises(VCSError): |
|
1101 | 1101 | self.repo.get_commit().get_node(path) |
|
1102 | 1102 | |
|
1103 | 1103 | def test_author_email(self): |
|
1104 | 1104 | assert 'marcin@python-blog.com' == \ |
|
1105 | 1105 | self.repo.get_commit('b986218ba1c9').author_email |
|
1106 | 1106 | assert 'lukasz.balcerzak@python-center.pl' == \ |
|
1107 | 1107 | self.repo.get_commit('3803844fdbd3').author_email |
|
1108 | 1108 | assert '' == self.repo.get_commit('84478366594b').author_email |
|
1109 | 1109 | |
|
1110 | 1110 | def test_author_username(self): |
|
1111 | 1111 | assert 'Marcin Kuzminski' == \ |
|
1112 | 1112 | self.repo.get_commit('b986218ba1c9').author_name |
|
1113 | 1113 | assert 'Lukasz Balcerzak' == \ |
|
1114 | 1114 | self.repo.get_commit('3803844fdbd3').author_name |
|
1115 | 1115 | assert 'marcink' == \ |
|
1116 | 1116 | self.repo.get_commit('84478366594b').author_name |
|
1117 | 1117 | |
|
1118 | 1118 | |
|
1119 | 1119 | class TestLargeFileRepo(object): |
|
1120 | 1120 | |
|
1121 | 1121 | def test_large_file(self, backend_hg): |
|
1122 | 1122 | repo = backend_hg.create_test_repo('largefiles', make_db_config()) |
|
1123 | 1123 | |
|
1124 | 1124 | tip = repo.scm_instance().get_commit() |
|
1125 | 1125 | node = tip.get_node('.hglf/thisfileislarge') |
|
1126 | 1126 | |
|
1127 | 1127 | lf_node = node.get_largefile_node() |
|
1128 | 1128 | |
|
1129 | 1129 | assert lf_node.is_largefile() is True |
|
1130 | 1130 | assert lf_node.size == 1024000 |
|
1131 | 1131 | assert lf_node.name == '.hglf/thisfileislarge' |
|
1132 | 1132 | |
|
1133 | 1133 | |
|
1134 | 1134 | class TestGetBranchName(object): |
|
1135 | 1135 | def test_returns_ref_name_when_type_is_branch(self): |
|
1136 | 1136 | ref = self._create_ref('branch', 'fake-name') |
|
1137 | 1137 | result = self.repo._get_branch_name(ref) |
|
1138 | 1138 | assert result == ref.name |
|
1139 | 1139 | |
|
1140 | 1140 | @pytest.mark.parametrize("type_", ("book", "tag")) |
|
1141 | 1141 | def test_queries_remote_when_type_is_not_branch(self, type_): |
|
1142 | 1142 | ref = self._create_ref(type_, 'wrong-fake-name') |
|
1143 | 1143 | with mock.patch.object(self.repo, "_remote") as remote_mock: |
|
1144 | 1144 | remote_mock.ctx_branch.return_value = "fake-name" |
|
1145 | 1145 | result = self.repo._get_branch_name(ref) |
|
1146 | 1146 | assert result == "fake-name" |
|
1147 | 1147 | remote_mock.ctx_branch.assert_called_once_with(ref.commit_id) |
|
1148 | 1148 | |
|
1149 | 1149 | def _create_ref(self, type_, name): |
|
1150 | 1150 | ref = mock.Mock() |
|
1151 | 1151 | ref.type = type_ |
|
1152 | 1152 | ref.name = name
|
1153 | 1153 | ref.commit_id = "deadbeef" |
|
1154 | 1154 | return ref |
|
1155 | 1155 | |
|
1156 | 1156 | |
|
1157 | 1157 | class TestIsTheSameBranch(object): |
|
1158 | 1158 | def test_returns_true_when_branches_are_equal(self): |
|
1159 | 1159 | source_ref = mock.Mock(name="source-ref") |
|
1160 | 1160 | target_ref = mock.Mock(name="target-ref") |
|
1161 | 1161 | branch_name_patcher = mock.patch.object( |
|
1162 | 1162 | self.repo, "_get_branch_name", return_value="default") |
|
1163 | 1163 | with branch_name_patcher as branch_name_mock: |
|
1164 | 1164 | result = self.repo._is_the_same_branch(source_ref, target_ref) |
|
1165 | 1165 | |
|
1166 | 1166 | expected_calls = [mock.call(source_ref), mock.call(target_ref)] |
|
1167 | 1167 | assert branch_name_mock.call_args_list == expected_calls |
|
1168 | 1168 | assert result is True |
|
1169 | 1169 | |
|
1170 | 1170 | def test_returns_false_when_branches_are_not_equal(self): |
|
1171 | 1171 | source_ref = mock.Mock(name="source-ref") |
|
1172 | 1172 | source_ref.name = "source-branch" |
|
1173 | 1173 | target_ref = mock.Mock(name="target-ref") |
|
1174 | 1174 | target_ref.name = "target-branch"
|
1175 | 1175 | |
|
1176 | 1176 | def side_effect(ref): |
|
1177 | 1177 | return ref.name |
|
1178 | 1178 | |
|
1179 | 1179 | branch_name_patcher = mock.patch.object( |
|
1180 | 1180 | self.repo, "_get_branch_name", side_effect=side_effect) |
|
1181 | 1181 | with branch_name_patcher as branch_name_mock: |
|
1182 | 1182 | result = self.repo._is_the_same_branch(source_ref, target_ref) |
|
1183 | 1183 | |
|
1184 | 1184 | expected_calls = [mock.call(source_ref), mock.call(target_ref)] |
|
1185 | 1185 | assert branch_name_mock.call_args_list == expected_calls |
|
1186 | 1186 | assert result is False |