##// END OF EJS Templates
git: use a fetch_sync based creation of remote repos....
marcink -
r3078:0a44452a default
parent child Browse files
Show More
@@ -1,338 +1,342 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.model.db import User
23 from rhodecode.model.db import User
24 from rhodecode.model.pull_request import PullRequestModel
24 from rhodecode.model.pull_request import PullRequestModel
25 from rhodecode.model.repo import RepoModel
25 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.user import UserModel
26 from rhodecode.model.user import UserModel
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
28 from rhodecode.api.tests.utils import build_data, api_call, assert_error
28 from rhodecode.api.tests.utils import build_data, api_call, assert_error
29
29
30
30
@pytest.mark.usefixtures("testuser_api", "app")
class TestCreatePullRequestApi(object):
    """API tests for the ``create_pull_request`` JSON-RPC endpoint.

    The ``testuser_api`` fixture is expected to provide ``self.apikey``,
    ``self.apikey_regular`` and ``self.test_user``; ``_prepare_data`` builds
    the source/target repositories for each test. -- assumed from usage;
    confirm against the fixture definitions.
    """

    # Per-test cleanup callbacks; executed and cleared in teardown_method.
    finalizers = []

    def teardown_method(self, method):
        # Run any cleanup callbacks registered during the test, then reset
        # the list so state does not leak into the next test.
        if self.finalizers:
            for finalizer in self.finalizers:
                finalizer()
            self.finalizers = []

    def test_create_with_wrong_data(self):
        """Omitting any required argument must produce a dedicated error."""
        required_data = {
            'source_repo': 'tests/source_repo',
            'target_repo': 'tests/target_repo',
            'source_ref': 'branch:default:initial',
            'target_ref': 'branch:default:new-feature',
        }
        for key in required_data:
            data = required_data.copy()
            data.pop(key)
            id_, params = build_data(
                self.apikey, 'create_pull_request', **data)
            response = api_call(self.app, params)

            expected = 'Missing non optional `{}` arg in JSON DATA'.format(key)
            assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_correct_data(self, backend):
        """A valid request creates a PR whose fields mirror the input data."""
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        # Identity check: ``is None`` instead of ``== None`` (PEP 8 E711).
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.title == data['title']
        assert pull_request.description == data['description']
        assert pull_request.source_ref == data['source_ref']
        assert pull_request.target_ref == data['target_ref']
        assert pull_request.source_repo.repo_name == data['source_repo']
        assert pull_request.target_repo.repo_name == data['target_repo']
        assert pull_request.revisions == [self.commit_ids['change']]
        # Only the default reviewer (repo owner) is attached.
        assert len(pull_request.reviewers) == 1

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_description(self, backend):
        """Description is optional and defaults to an empty string."""
        data = self._prepare_data(backend)
        data.pop('description')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.description == ''

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_title(self, backend):
        """Title is optional; a default is derived from repos and branch."""
        data = self._prepare_data(backend)
        data.pop('title')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        result = response.json
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        data['ref'] = backend.default_branch_name
        title = '{source_repo}#{ref} to {target_repo}'.format(**data)
        assert pull_request.title == title

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_names(
            self, backend, no_notifications):
        """Reviewers may be given by username, with reasons and mandatory flag."""
        data = self._prepare_data(backend)
        reviewers = [
            {'username': TEST_USER_REGULAR_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]},
            {'username': TEST_USER_ADMIN_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)],
             'mandatory': True},
        ]
        data['reviewers'] = reviewers

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.username,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        # default reviewer will be added who is an owner of the repo
        reviewers.append(
            {'username': pull_request.author.username,
             'reasons': [u'Default reviewer', u'Repository owner']},
        )
        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_ids(
            self, backend, no_notifications):
        """Reviewers may also be given by numeric user id."""
        data = self._prepare_data(backend)
        reviewers = [
            {'username': UserModel().get_by_username(
                TEST_USER_REGULAR_LOGIN).user_id,
             'reasons': ['added manually']},
            {'username': UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN).user_id,
             'reasons': ['added manually']},
        ]

        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.user_id,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)
        # default reviewer will be added who is an owner of the repo
        reviewers.append(
            {'username': pull_request.author.user_id,
             'reasons': [u'Default reviewer', u'Repository owner']},
        )
        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_the_reviewer_is_not_found(self, backend):
        """An unknown reviewer username must be rejected."""
        data = self._prepare_data(backend)
        data['reviewers'] = [{'username': 'somebody'}]
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'user `somebody` does not exist'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_cannot_create_with_reviewers_in_wrong_format(self, backend):
        """Reviewers must be a list of dicts, not a comma-joined string."""
        data = self._prepare_data(backend)
        reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN])
        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = {u'': '"test_regular,test_admin" is not iterable'}
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_no_commit_hashes(self, backend):
        """Bare ``branch:<name>`` refs are resolved to full refs on creation."""
        data = self._prepare_data(backend)
        expected_source_ref = data['source_ref']
        expected_target_ref = data['target_ref']
        data['source_ref'] = 'branch:{}'.format(backend.default_branch_name)
        data['target_ref'] = 'branch:{}'.format(backend.default_branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.source_ref == expected_source_ref
        assert pull_request.target_ref == expected_target_ref

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"])
    def test_create_fails_with_wrong_repo(self, backend, data_key):
        """A non-existing source or target repository must be rejected."""
        repo_name = 'fake-repo'
        data = self._prepare_data(backend)
        data[data_key] = repo_name
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(repo_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_branch(self, backend, data_key):
        """A ref pointing at a missing branch must be rejected."""
        branch_name = 'test-branch'
        data = self._prepare_data(backend)
        data[data_key] = "branch:{}".format(branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'The specified value:{type}:`{name}` ' \
                           'does not exist, or is not allowed.'.format(type='branch',
                                                                      name=branch_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key):
        """A ref not in ``type:name:commit_id`` format must be rejected."""
        data = self._prepare_data(backend)
        # Typo fix: 'stange-ref' -> 'strange-ref' (local test data only; the
        # expected error message is built from the same variable).
        ref = 'strange-ref'
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = (
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_ref(self, backend, data_key):
        """A well-formed ref with an unknown commit id must be rejected."""
        commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10'
        ref = self._get_full_ref(backend, commit_id)
        data = self._prepare_data(backend)
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'Ref `{}` does not exist'.format(ref)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_revisions(self, backend):
        """Source and target at the same head yields 'no commits found'."""
        data = self._prepare_data(backend, source_head='initial')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'no commits found'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_permissions(self, backend):
        """Without read permission the source repo appears not to exist."""
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, self.test_user)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(
            self.source.repo_name)
        assert_error(id_, expected_message, given=response.body)

    def _prepare_data(
            self, backend, source_head='change', target_head='initial'):
        """Create master/source/target repos and return valid request data.

        Sets ``self.commit_ids``, ``self.source`` and ``self.target`` as side
        effects and grants the test user read permission on the source repo.
        """
        commits = [
            {'message': 'initial'},
            {'message': 'change'},
            {'message': 'new-feature', 'parents': ['initial']},
        ]
        self.commit_ids = backend.create_master_repo(commits)
        self.source = backend.create_repo(heads=[source_head])
        self.target = backend.create_repo(heads=[target_head])

        data = {
            'source_repo': self.source.repo_name,
            'target_repo': self.target.repo_name,
            'source_ref': self._get_full_ref(
                backend, self.commit_ids[source_head]),
            'target_ref': self._get_full_ref(
                backend, self.commit_ids[target_head]),
            'title': 'Test PR 1',
            'description': 'Test'
        }
        RepoModel().grant_user_permission(
            self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read')
        return data

    def _get_full_ref(self, backend, commit_id):
        """Return a fully qualified ``branch:<name>:<commit_id>`` ref."""
        return 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_id)
@@ -1,308 +1,308 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 RhodeCode task modules, containing all task that suppose to be run
22 RhodeCode task modules, containing all task that suppose to be run
23 by celery daemon
23 by celery daemon
24 """
24 """
25
25
26 import os
26 import os
27 import time
27 import time
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode.lib import audit_logger
30 from rhodecode.lib import audit_logger
31 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
31 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
32 from rhodecode.lib.hooks_base import log_create_repository
32 from rhodecode.lib.hooks_base import log_create_repository
33 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
33 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
34 from rhodecode.lib.utils2 import safe_int, str2bool
34 from rhodecode.lib.utils2 import safe_int, str2bool
35 from rhodecode.model.db import Session, IntegrityError, Repository, User, true
35 from rhodecode.model.db import Session, IntegrityError, Repository, User, true
36
36
37
37
38 @async_task(ignore_result=True, base=RequestContextTask)
38 @async_task(ignore_result=True, base=RequestContextTask)
39 def send_email(recipients, subject, body='', html_body='', email_config=None):
39 def send_email(recipients, subject, body='', html_body='', email_config=None):
40 """
40 """
41 Sends an email with defined parameters from the .ini files.
41 Sends an email with defined parameters from the .ini files.
42
42
43 :param recipients: list of recipients, it this is empty the defined email
43 :param recipients: list of recipients, it this is empty the defined email
44 address from field 'email_to' is used instead
44 address from field 'email_to' is used instead
45 :param subject: subject of the mail
45 :param subject: subject of the mail
46 :param body: body of the mail
46 :param body: body of the mail
47 :param html_body: html version of body
47 :param html_body: html version of body
48 """
48 """
49 log = get_logger(send_email)
49 log = get_logger(send_email)
50
50
51 email_config = email_config or rhodecode.CONFIG
51 email_config = email_config or rhodecode.CONFIG
52
52
53 mail_server = email_config.get('smtp_server') or None
53 mail_server = email_config.get('smtp_server') or None
54 if mail_server is None:
54 if mail_server is None:
55 log.error("SMTP server information missing. Sending email failed. "
55 log.error("SMTP server information missing. Sending email failed. "
56 "Make sure that `smtp_server` variable is configured "
56 "Make sure that `smtp_server` variable is configured "
57 "inside the .ini file")
57 "inside the .ini file")
58 return False
58 return False
59
59
60 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
60 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
61 if not recipients:
61 if not recipients:
62 # if recipients are not defined we send to email_config + all admins
62 # if recipients are not defined we send to email_config + all admins
63 admins = []
63 admins = []
64 for u in User.query().filter(User.admin == true()).all():
64 for u in User.query().filter(User.admin == true()).all():
65 if u.email:
65 if u.email:
66 admins.append(u.email)
66 admins.append(u.email)
67 recipients = []
67 recipients = []
68 config_email = email_config.get('email_to')
68 config_email = email_config.get('email_to')
69 if config_email:
69 if config_email:
70 recipients += [config_email]
70 recipients += [config_email]
71 recipients += admins
71 recipients += admins
72
72
73 mail_from = email_config.get('app_email_from', 'RhodeCode')
73 mail_from = email_config.get('app_email_from', 'RhodeCode')
74 user = email_config.get('smtp_username')
74 user = email_config.get('smtp_username')
75 passwd = email_config.get('smtp_password')
75 passwd = email_config.get('smtp_password')
76 mail_port = email_config.get('smtp_port')
76 mail_port = email_config.get('smtp_port')
77 tls = str2bool(email_config.get('smtp_use_tls'))
77 tls = str2bool(email_config.get('smtp_use_tls'))
78 ssl = str2bool(email_config.get('smtp_use_ssl'))
78 ssl = str2bool(email_config.get('smtp_use_ssl'))
79 debug = str2bool(email_config.get('debug'))
79 debug = str2bool(email_config.get('debug'))
80 smtp_auth = email_config.get('smtp_auth')
80 smtp_auth = email_config.get('smtp_auth')
81
81
82 try:
82 try:
83 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
83 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
84 mail_port, ssl, tls, debug=debug)
84 mail_port, ssl, tls, debug=debug)
85 m.send(recipients, subject, body, html_body)
85 m.send(recipients, subject, body, html_body)
86 except Exception:
86 except Exception:
87 log.exception('Mail sending failed')
87 log.exception('Mail sending failed')
88 return False
88 return False
89 return True
89 return True
90
90
91
91
92 @async_task(ignore_result=True, base=RequestContextTask)
92 @async_task(ignore_result=True, base=RequestContextTask)
93 def create_repo(form_data, cur_user):
93 def create_repo(form_data, cur_user):
94 from rhodecode.model.repo import RepoModel
94 from rhodecode.model.repo import RepoModel
95 from rhodecode.model.user import UserModel
95 from rhodecode.model.user import UserModel
96 from rhodecode.model.settings import SettingsModel
96 from rhodecode.model.settings import SettingsModel
97
97
98 log = get_logger(create_repo)
98 log = get_logger(create_repo)
99
99
100 cur_user = UserModel()._get_user(cur_user)
100 cur_user = UserModel()._get_user(cur_user)
101 owner = cur_user
101 owner = cur_user
102
102
103 repo_name = form_data['repo_name']
103 repo_name = form_data['repo_name']
104 repo_name_full = form_data['repo_name_full']
104 repo_name_full = form_data['repo_name_full']
105 repo_type = form_data['repo_type']
105 repo_type = form_data['repo_type']
106 description = form_data['repo_description']
106 description = form_data['repo_description']
107 private = form_data['repo_private']
107 private = form_data['repo_private']
108 clone_uri = form_data.get('clone_uri')
108 clone_uri = form_data.get('clone_uri')
109 repo_group = safe_int(form_data['repo_group'])
109 repo_group = safe_int(form_data['repo_group'])
110 landing_rev = form_data['repo_landing_rev']
110 landing_rev = form_data['repo_landing_rev']
111 copy_fork_permissions = form_data.get('copy_permissions')
111 copy_fork_permissions = form_data.get('copy_permissions')
112 copy_group_permissions = form_data.get('repo_copy_permissions')
112 copy_group_permissions = form_data.get('repo_copy_permissions')
113 fork_of = form_data.get('fork_parent_id')
113 fork_of = form_data.get('fork_parent_id')
114 state = form_data.get('repo_state', Repository.STATE_PENDING)
114 state = form_data.get('repo_state', Repository.STATE_PENDING)
115
115
116 # repo creation defaults, private and repo_type are filled in form
116 # repo creation defaults, private and repo_type are filled in form
117 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
117 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
118 enable_statistics = form_data.get(
118 enable_statistics = form_data.get(
119 'enable_statistics', defs.get('repo_enable_statistics'))
119 'enable_statistics', defs.get('repo_enable_statistics'))
120 enable_locking = form_data.get(
120 enable_locking = form_data.get(
121 'enable_locking', defs.get('repo_enable_locking'))
121 'enable_locking', defs.get('repo_enable_locking'))
122 enable_downloads = form_data.get(
122 enable_downloads = form_data.get(
123 'enable_downloads', defs.get('repo_enable_downloads'))
123 'enable_downloads', defs.get('repo_enable_downloads'))
124
124
125 try:
125 try:
126 repo = RepoModel()._create_repo(
126 RepoModel()._create_repo(
127 repo_name=repo_name_full,
127 repo_name=repo_name_full,
128 repo_type=repo_type,
128 repo_type=repo_type,
129 description=description,
129 description=description,
130 owner=owner,
130 owner=owner,
131 private=private,
131 private=private,
132 clone_uri=clone_uri,
132 clone_uri=clone_uri,
133 repo_group=repo_group,
133 repo_group=repo_group,
134 landing_rev=landing_rev,
134 landing_rev=landing_rev,
135 fork_of=fork_of,
135 fork_of=fork_of,
136 copy_fork_permissions=copy_fork_permissions,
136 copy_fork_permissions=copy_fork_permissions,
137 copy_group_permissions=copy_group_permissions,
137 copy_group_permissions=copy_group_permissions,
138 enable_statistics=enable_statistics,
138 enable_statistics=enable_statistics,
139 enable_locking=enable_locking,
139 enable_locking=enable_locking,
140 enable_downloads=enable_downloads,
140 enable_downloads=enable_downloads,
141 state=state
141 state=state
142 )
142 )
143 Session().commit()
143 Session().commit()
144
144
145 # now create this repo on Filesystem
145 # now create this repo on Filesystem
146 RepoModel()._create_filesystem_repo(
146 RepoModel()._create_filesystem_repo(
147 repo_name=repo_name,
147 repo_name=repo_name,
148 repo_type=repo_type,
148 repo_type=repo_type,
149 repo_group=RepoModel()._get_repo_group(repo_group),
149 repo_group=RepoModel()._get_repo_group(repo_group),
150 clone_uri=clone_uri,
150 clone_uri=clone_uri,
151 )
151 )
152 repo = Repository.get_by_repo_name(repo_name_full)
152 repo = Repository.get_by_repo_name(repo_name_full)
153 log_create_repository(created_by=owner.username, **repo.get_dict())
153 log_create_repository(created_by=owner.username, **repo.get_dict())
154
154
155 # update repo commit caches initially
155 # update repo commit caches initially
156 repo.update_commit_cache()
156 repo.update_commit_cache()
157
157
158 # set new created state
158 # set new created state
159 repo.set_state(Repository.STATE_CREATED)
159 repo.set_state(Repository.STATE_CREATED)
160 repo_id = repo.repo_id
160 repo_id = repo.repo_id
161 repo_data = repo.get_api_data()
161 repo_data = repo.get_api_data()
162
162
163 audit_logger.store(
163 audit_logger.store(
164 'repo.create', action_data={'data': repo_data},
164 'repo.create', action_data={'data': repo_data},
165 user=cur_user,
165 user=cur_user,
166 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
166 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
167
167
168 Session().commit()
168 Session().commit()
169 except Exception as e:
169 except Exception as e:
170 log.warning('Exception occurred when creating repository, '
170 log.warning('Exception occurred when creating repository, '
171 'doing cleanup...', exc_info=True)
171 'doing cleanup...', exc_info=True)
172 if isinstance(e, IntegrityError):
172 if isinstance(e, IntegrityError):
173 Session().rollback()
173 Session().rollback()
174
174
175 # rollback things manually !
175 # rollback things manually !
176 repo = Repository.get_by_repo_name(repo_name_full)
176 repo = Repository.get_by_repo_name(repo_name_full)
177 if repo:
177 if repo:
178 Repository.delete(repo.repo_id)
178 Repository.delete(repo.repo_id)
179 Session().commit()
179 Session().commit()
180 RepoModel()._delete_filesystem_repo(repo)
180 RepoModel()._delete_filesystem_repo(repo)
181 log.info('Cleanup of repo %s finished', repo_name_full)
181 log.info('Cleanup of repo %s finished', repo_name_full)
182 raise
182 raise
183
183
184 return True
184 return True
185
185
186
186
187 @async_task(ignore_result=True, base=RequestContextTask)
187 @async_task(ignore_result=True, base=RequestContextTask)
188 def create_repo_fork(form_data, cur_user):
188 def create_repo_fork(form_data, cur_user):
189 """
189 """
190 Creates a fork of repository using internal VCS methods
190 Creates a fork of repository using internal VCS methods
191 """
191 """
192 from rhodecode.model.repo import RepoModel
192 from rhodecode.model.repo import RepoModel
193 from rhodecode.model.user import UserModel
193 from rhodecode.model.user import UserModel
194
194
195 log = get_logger(create_repo_fork)
195 log = get_logger(create_repo_fork)
196
196
197 cur_user = UserModel()._get_user(cur_user)
197 cur_user = UserModel()._get_user(cur_user)
198 owner = cur_user
198 owner = cur_user
199
199
200 repo_name = form_data['repo_name'] # fork in this case
200 repo_name = form_data['repo_name'] # fork in this case
201 repo_name_full = form_data['repo_name_full']
201 repo_name_full = form_data['repo_name_full']
202 repo_type = form_data['repo_type']
202 repo_type = form_data['repo_type']
203 description = form_data['description']
203 description = form_data['description']
204 private = form_data['private']
204 private = form_data['private']
205 clone_uri = form_data.get('clone_uri')
205 clone_uri = form_data.get('clone_uri')
206 repo_group = safe_int(form_data['repo_group'])
206 repo_group = safe_int(form_data['repo_group'])
207 landing_rev = form_data['landing_rev']
207 landing_rev = form_data['landing_rev']
208 copy_fork_permissions = form_data.get('copy_permissions')
208 copy_fork_permissions = form_data.get('copy_permissions')
209 fork_id = safe_int(form_data.get('fork_parent_id'))
209 fork_id = safe_int(form_data.get('fork_parent_id'))
210
210
211 try:
211 try:
212 fork_of = RepoModel()._get_repo(fork_id)
212 fork_of = RepoModel()._get_repo(fork_id)
213 RepoModel()._create_repo(
213 RepoModel()._create_repo(
214 repo_name=repo_name_full,
214 repo_name=repo_name_full,
215 repo_type=repo_type,
215 repo_type=repo_type,
216 description=description,
216 description=description,
217 owner=owner,
217 owner=owner,
218 private=private,
218 private=private,
219 clone_uri=clone_uri,
219 clone_uri=clone_uri,
220 repo_group=repo_group,
220 repo_group=repo_group,
221 landing_rev=landing_rev,
221 landing_rev=landing_rev,
222 fork_of=fork_of,
222 fork_of=fork_of,
223 copy_fork_permissions=copy_fork_permissions
223 copy_fork_permissions=copy_fork_permissions
224 )
224 )
225
225
226 Session().commit()
226 Session().commit()
227
227
228 base_path = Repository.base_path()
228 base_path = Repository.base_path()
229 source_repo_path = os.path.join(base_path, fork_of.repo_name)
229 source_repo_path = os.path.join(base_path, fork_of.repo_name)
230
230
231 # now create this repo on Filesystem
231 # now create this repo on Filesystem
232 RepoModel()._create_filesystem_repo(
232 RepoModel()._create_filesystem_repo(
233 repo_name=repo_name,
233 repo_name=repo_name,
234 repo_type=repo_type,
234 repo_type=repo_type,
235 repo_group=RepoModel()._get_repo_group(repo_group),
235 repo_group=RepoModel()._get_repo_group(repo_group),
236 clone_uri=source_repo_path,
236 clone_uri=source_repo_path,
237 )
237 )
238 repo = Repository.get_by_repo_name(repo_name_full)
238 repo = Repository.get_by_repo_name(repo_name_full)
239 log_create_repository(created_by=owner.username, **repo.get_dict())
239 log_create_repository(created_by=owner.username, **repo.get_dict())
240
240
241 # update repo commit caches initially
241 # update repo commit caches initially
242 config = repo._config
242 config = repo._config
243 config.set('extensions', 'largefiles', '')
243 config.set('extensions', 'largefiles', '')
244 repo.update_commit_cache(config=config)
244 repo.update_commit_cache(config=config)
245
245
246 # set new created state
246 # set new created state
247 repo.set_state(Repository.STATE_CREATED)
247 repo.set_state(Repository.STATE_CREATED)
248
248
249 repo_id = repo.repo_id
249 repo_id = repo.repo_id
250 repo_data = repo.get_api_data()
250 repo_data = repo.get_api_data()
251 audit_logger.store(
251 audit_logger.store(
252 'repo.fork', action_data={'data': repo_data},
252 'repo.fork', action_data={'data': repo_data},
253 user=cur_user,
253 user=cur_user,
254 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
254 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
255
255
256 Session().commit()
256 Session().commit()
257 except Exception as e:
257 except Exception as e:
258 log.warning('Exception occurred when forking repository, '
258 log.warning('Exception occurred when forking repository, '
259 'doing cleanup...', exc_info=True)
259 'doing cleanup...', exc_info=True)
260 if isinstance(e, IntegrityError):
260 if isinstance(e, IntegrityError):
261 Session().rollback()
261 Session().rollback()
262
262
263 # rollback things manually !
263 # rollback things manually !
264 repo = Repository.get_by_repo_name(repo_name_full)
264 repo = Repository.get_by_repo_name(repo_name_full)
265 if repo:
265 if repo:
266 Repository.delete(repo.repo_id)
266 Repository.delete(repo.repo_id)
267 Session().commit()
267 Session().commit()
268 RepoModel()._delete_filesystem_repo(repo)
268 RepoModel()._delete_filesystem_repo(repo)
269 log.info('Cleanup of repo %s finished', repo_name_full)
269 log.info('Cleanup of repo %s finished', repo_name_full)
270 raise
270 raise
271
271
272 return True
272 return True
273
273
274
274
275 @async_task(ignore_result=True)
275 @async_task(ignore_result=True)
276 def repo_maintenance(repoid):
276 def repo_maintenance(repoid):
277 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
277 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
278 log = get_logger(repo_maintenance)
278 log = get_logger(repo_maintenance)
279 repo = Repository.get_by_id_or_repo_name(repoid)
279 repo = Repository.get_by_id_or_repo_name(repoid)
280 if repo:
280 if repo:
281 maintenance = repo_maintenance_lib.RepoMaintenance()
281 maintenance = repo_maintenance_lib.RepoMaintenance()
282 tasks = maintenance.get_tasks_for_repo(repo)
282 tasks = maintenance.get_tasks_for_repo(repo)
283 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
283 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
284 executed_types = maintenance.execute(repo)
284 executed_types = maintenance.execute(repo)
285 log.debug('Got execution results %s', executed_types)
285 log.debug('Got execution results %s', executed_types)
286 else:
286 else:
287 log.debug('Repo `%s` not found or without a clone_url', repoid)
287 log.debug('Repo `%s` not found or without a clone_url', repoid)
288
288
289
289
290 @async_task(ignore_result=True)
290 @async_task(ignore_result=True)
291 def check_for_update():
291 def check_for_update():
292 from rhodecode.model.update import UpdateModel
292 from rhodecode.model.update import UpdateModel
293 update_url = UpdateModel().get_update_url()
293 update_url = UpdateModel().get_update_url()
294 cur_ver = rhodecode.__version__
294 cur_ver = rhodecode.__version__
295
295
296 try:
296 try:
297 data = UpdateModel().get_update_data(update_url)
297 data = UpdateModel().get_update_data(update_url)
298 latest = data['versions'][0]
298 latest = data['versions'][0]
299 UpdateModel().store_version(latest['version'])
299 UpdateModel().store_version(latest['version'])
300 except Exception:
300 except Exception:
301 pass
301 pass
302
302
303
303
304 @async_task(ignore_result=False)
304 @async_task(ignore_result=False)
305 def beat_check(*args, **kwargs):
305 def beat_check(*args, **kwargs):
306 log = get_logger(beat_check)
306 log = get_logger(beat_check)
307 log.info('Got args: %r and kwargs %r', args, kwargs)
307 log.info('Got args: %r and kwargs %r', args, kwargs)
308 return time.time()
308 return time.time()
@@ -1,1009 +1,999 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
47 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
47 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 update_after_clone=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire
65 self.with_wire = with_wire
66
66
67 self._init_repo(create, src_url, update_after_clone, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75
75
76 @LazyProperty
76 @LazyProperty
77 def bare(self):
77 def bare(self):
78 return self._remote.bare()
78 return self._remote.bare()
79
79
80 @LazyProperty
80 @LazyProperty
81 def head(self):
81 def head(self):
82 return self._remote.head()
82 return self._remote.head()
83
83
84 @LazyProperty
84 @LazyProperty
85 def commit_ids(self):
85 def commit_ids(self):
86 """
86 """
87 Returns list of commit ids, in ascending order. Being lazy
87 Returns list of commit ids, in ascending order. Being lazy
88 attribute allows external tools to inject commit ids from cache.
88 attribute allows external tools to inject commit ids from cache.
89 """
89 """
90 commit_ids = self._get_all_commit_ids()
90 commit_ids = self._get_all_commit_ids()
91 self._rebuild_cache(commit_ids)
91 self._rebuild_cache(commit_ids)
92 return commit_ids
92 return commit_ids
93
93
94 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
97
97
98 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
99 """
99 """
100 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
101 (stdout, stderr).
102
102
103 :param cmd: git command to be executed
103 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
105 """
105 """
106 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
108
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
111 if err and not skip_stderr_log:
111 if err and not skip_stderr_log:
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 return out, err
113 return out, err
114
114
115 @staticmethod
115 @staticmethod
116 def check_url(url, config):
116 def check_url(url, config):
117 """
117 """
118 Function will check given url and try to verify if it's a valid
118 Function will check given url and try to verify if it's a valid
119 link. Sometimes it may happened that git will issue basic
119 link. Sometimes it may happened that git will issue basic
120 auth request that can cause whole API to hang when used from python
120 auth request that can cause whole API to hang when used from python
121 or other external calls.
121 or other external calls.
122
122
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 when the return code is non 200
124 when the return code is non 200
125 """
125 """
126 # check first if it's not an url
126 # check first if it's not an url
127 if os.path.isdir(url) or url.startswith('file:'):
127 if os.path.isdir(url) or url.startswith('file:'):
128 return True
128 return True
129
129
130 if '+' in url.split('://', 1)[0]:
130 if '+' in url.split('://', 1)[0]:
131 url = url.split('+', 1)[1]
131 url = url.split('+', 1)[1]
132
132
133 # Request the _remote to verify the url
133 # Request the _remote to verify the url
134 return connection.Git.check_url(url, config.serialize())
134 return connection.Git.check_url(url, config.serialize())
135
135
136 @staticmethod
136 @staticmethod
137 def is_valid_repository(path):
137 def is_valid_repository(path):
138 if os.path.isdir(os.path.join(path, '.git')):
138 if os.path.isdir(os.path.join(path, '.git')):
139 return True
139 return True
140 # check case of bare repository
140 # check case of bare repository
141 try:
141 try:
142 GitRepository(path)
142 GitRepository(path)
143 return True
143 return True
144 except VCSError:
144 except VCSError:
145 pass
145 pass
146 return False
146 return False
147
147
148 def _init_repo(self, create, src_url=None, update_after_clone=False,
148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 bare=False):
149 bare=False):
150 if create and os.path.exists(self.path):
150 if create and os.path.exists(self.path):
151 raise RepositoryError(
151 raise RepositoryError(
152 "Cannot create repository at %s, location already exist"
152 "Cannot create repository at %s, location already exist"
153 % self.path)
153 % self.path)
154
154
155 if bare and do_workspace_checkout:
156 raise RepositoryError("Cannot update a bare repository")
155 try:
157 try:
156 if create and src_url:
158
159 if src_url:
160 # check URL before any actions
157 GitRepository.check_url(src_url, self.config)
161 GitRepository.check_url(src_url, self.config)
158 self.clone(src_url, update_after_clone, bare)
162
159 elif create:
163 if create:
160 os.makedirs(self.path, mode=0755)
164 os.makedirs(self.path, mode=0755)
161
165
162 if bare:
166 if bare:
163 self._remote.init_bare()
167 self._remote.init_bare()
164 else:
168 else:
165 self._remote.init()
169 self._remote.init()
170
171 if src_url and bare:
172 # bare repository only allows a fetch and checkout is not allowed
173 self.fetch(src_url, commit_ids=None)
174 elif src_url:
175 self.pull(src_url, commit_ids=None,
176 update_after=do_workspace_checkout)
177
166 else:
178 else:
167 if not self._remote.assert_correct_path():
179 if not self._remote.assert_correct_path():
168 raise RepositoryError(
180 raise RepositoryError(
169 'Path "%s" does not contain a Git repository' %
181 'Path "%s" does not contain a Git repository' %
170 (self.path,))
182 (self.path,))
171
183
172 # TODO: johbo: check if we have to translate the OSError here
184 # TODO: johbo: check if we have to translate the OSError here
173 except OSError as err:
185 except OSError as err:
174 raise RepositoryError(err)
186 raise RepositoryError(err)
175
187
176 def _get_all_commit_ids(self, filters=None):
188 def _get_all_commit_ids(self, filters=None):
177 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
178 # fails if it is. And it's cheaper to ask than throw the subprocess
190 # fails if it is. And it's cheaper to ask than throw the subprocess
179 # errors
191 # errors
180
192
181 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
182 if not head:
194 if not head:
183 return []
195 return []
184
196
185 rev_filter = ['--branches', '--tags']
197 rev_filter = ['--branches', '--tags']
186 extra_filter = []
198 extra_filter = []
187
199
188 if filters:
200 if filters:
189 if filters.get('since'):
201 if filters.get('since'):
190 extra_filter.append('--since=%s' % (filters['since']))
202 extra_filter.append('--since=%s' % (filters['since']))
191 if filters.get('until'):
203 if filters.get('until'):
192 extra_filter.append('--until=%s' % (filters['until']))
204 extra_filter.append('--until=%s' % (filters['until']))
193 if filters.get('branch_name'):
205 if filters.get('branch_name'):
194 rev_filter = ['--tags']
206 rev_filter = ['--tags']
195 extra_filter.append(filters['branch_name'])
207 extra_filter.append(filters['branch_name'])
196 rev_filter.extend(extra_filter)
208 rev_filter.extend(extra_filter)
197
209
198 # if filters.get('start') or filters.get('end'):
210 # if filters.get('start') or filters.get('end'):
199 # # skip is offset, max-count is limit
211 # # skip is offset, max-count is limit
200 # if filters.get('start'):
212 # if filters.get('start'):
201 # extra_filter += ' --skip=%s' % filters['start']
213 # extra_filter += ' --skip=%s' % filters['start']
202 # if filters.get('end'):
214 # if filters.get('end'):
203 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
204
216
205 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
206 try:
218 try:
207 output, __ = self.run_git_command(cmd)
219 output, __ = self.run_git_command(cmd)
208 except RepositoryError:
220 except RepositoryError:
209 # Can be raised for empty repositories
221 # Can be raised for empty repositories
210 return []
222 return []
211 return output.splitlines()
223 return output.splitlines()
212
224
213 def _get_commit_id(self, commit_id_or_idx):
225 def _get_commit_id(self, commit_id_or_idx):
214 def is_null(value):
226 def is_null(value):
215 return len(value) == commit_id_or_idx.count('0')
227 return len(value) == commit_id_or_idx.count('0')
216
228
217 if self.is_empty():
229 if self.is_empty():
218 raise EmptyRepositoryError("There are no commits yet")
230 raise EmptyRepositoryError("There are no commits yet")
219
231
220 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
232 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
221 return self.commit_ids[-1]
233 return self.commit_ids[-1]
222
234
223 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
235 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
224 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
236 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
225 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
237 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
226 try:
238 try:
227 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
239 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
228 except Exception:
240 except Exception:
229 msg = "Commit %s does not exist for %s" % (
241 msg = "Commit %s does not exist for %s" % (
230 commit_id_or_idx, self)
242 commit_id_or_idx, self)
231 raise CommitDoesNotExistError(msg)
243 raise CommitDoesNotExistError(msg)
232
244
233 elif is_bstr:
245 elif is_bstr:
234 # check full path ref, eg. refs/heads/master
246 # check full path ref, eg. refs/heads/master
235 ref_id = self._refs.get(commit_id_or_idx)
247 ref_id = self._refs.get(commit_id_or_idx)
236 if ref_id:
248 if ref_id:
237 return ref_id
249 return ref_id
238
250
239 # check branch name
251 # check branch name
240 branch_ids = self.branches.values()
252 branch_ids = self.branches.values()
241 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
253 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
242 if ref_id:
254 if ref_id:
243 return ref_id
255 return ref_id
244
256
245 # check tag name
257 # check tag name
246 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
258 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
247 if ref_id:
259 if ref_id:
248 return ref_id
260 return ref_id
249
261
250 if (not SHA_PATTERN.match(commit_id_or_idx) or
262 if (not SHA_PATTERN.match(commit_id_or_idx) or
251 commit_id_or_idx not in self.commit_ids):
263 commit_id_or_idx not in self.commit_ids):
252 msg = "Commit %s does not exist for %s" % (
264 msg = "Commit %s does not exist for %s" % (
253 commit_id_or_idx, self)
265 commit_id_or_idx, self)
254 raise CommitDoesNotExistError(msg)
266 raise CommitDoesNotExistError(msg)
255
267
256 # Ensure we return full id
268 # Ensure we return full id
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 raise CommitDoesNotExistError(
270 raise CommitDoesNotExistError(
259 "Given commit id %s not recognized" % commit_id_or_idx)
271 "Given commit id %s not recognized" % commit_id_or_idx)
260 return commit_id_or_idx
272 return commit_id_or_idx
261
273
262 def get_hook_location(self):
274 def get_hook_location(self):
263 """
275 """
264 returns absolute path to location where hooks are stored
276 returns absolute path to location where hooks are stored
265 """
277 """
266 loc = os.path.join(self.path, 'hooks')
278 loc = os.path.join(self.path, 'hooks')
267 if not self.bare:
279 if not self.bare:
268 loc = os.path.join(self.path, '.git', 'hooks')
280 loc = os.path.join(self.path, '.git', 'hooks')
269 return loc
281 return loc
270
282
271 @LazyProperty
283 @LazyProperty
272 def last_change(self):
284 def last_change(self):
273 """
285 """
274 Returns last change made on this repository as
286 Returns last change made on this repository as
275 `datetime.datetime` object.
287 `datetime.datetime` object.
276 """
288 """
277 try:
289 try:
278 return self.get_commit().date
290 return self.get_commit().date
279 except RepositoryError:
291 except RepositoryError:
280 tzoffset = makedate()[1]
292 tzoffset = makedate()[1]
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282
294
283 def _get_fs_mtime(self):
295 def _get_fs_mtime(self):
284 idx_loc = '' if self.bare else '.git'
296 idx_loc = '' if self.bare else '.git'
285 # fallback to filesystem
297 # fallback to filesystem
286 in_path = os.path.join(self.path, idx_loc, "index")
298 in_path = os.path.join(self.path, idx_loc, "index")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 if os.path.exists(in_path):
300 if os.path.exists(in_path):
289 return os.stat(in_path).st_mtime
301 return os.stat(in_path).st_mtime
290 else:
302 else:
291 return os.stat(he_path).st_mtime
303 return os.stat(he_path).st_mtime
292
304
293 @LazyProperty
305 @LazyProperty
294 def description(self):
306 def description(self):
295 description = self._remote.get_description()
307 description = self._remote.get_description()
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297
309
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 if self.is_empty():
311 if self.is_empty():
300 return OrderedDict()
312 return OrderedDict()
301
313
302 result = []
314 result = []
303 for ref, sha in self._refs.iteritems():
315 for ref, sha in self._refs.iteritems():
304 if ref.startswith(prefix):
316 if ref.startswith(prefix):
305 ref_name = ref
317 ref_name = ref
306 if strip_prefix:
318 if strip_prefix:
307 ref_name = ref[len(prefix):]
319 ref_name = ref[len(prefix):]
308 result.append((safe_unicode(ref_name), sha))
320 result.append((safe_unicode(ref_name), sha))
309
321
310 def get_name(entry):
322 def get_name(entry):
311 return entry[0]
323 return entry[0]
312
324
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314
326
315 def _get_branches(self):
327 def _get_branches(self):
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317
329
318 @LazyProperty
330 @LazyProperty
319 def branches(self):
331 def branches(self):
320 return self._get_branches()
332 return self._get_branches()
321
333
322 @LazyProperty
334 @LazyProperty
323 def branches_closed(self):
335 def branches_closed(self):
324 return {}
336 return {}
325
337
326 @LazyProperty
338 @LazyProperty
327 def bookmarks(self):
339 def bookmarks(self):
328 return {}
340 return {}
329
341
330 @LazyProperty
342 @LazyProperty
331 def branches_all(self):
343 def branches_all(self):
332 all_branches = {}
344 all_branches = {}
333 all_branches.update(self.branches)
345 all_branches.update(self.branches)
334 all_branches.update(self.branches_closed)
346 all_branches.update(self.branches_closed)
335 return all_branches
347 return all_branches
336
348
337 @LazyProperty
349 @LazyProperty
338 def tags(self):
350 def tags(self):
339 return self._get_tags()
351 return self._get_tags()
340
352
341 def _get_tags(self):
353 def _get_tags(self):
342 return self._get_refs_entries(
354 return self._get_refs_entries(
343 prefix='refs/tags/', strip_prefix=True, reverse=True)
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
344
356
345 def tag(self, name, user, commit_id=None, message=None, date=None,
357 def tag(self, name, user, commit_id=None, message=None, date=None,
346 **kwargs):
358 **kwargs):
347 # TODO: fix this method to apply annotated tags correct with message
359 # TODO: fix this method to apply annotated tags correct with message
348 """
360 """
349 Creates and returns a tag for the given ``commit_id``.
361 Creates and returns a tag for the given ``commit_id``.
350
362
351 :param name: name for new tag
363 :param name: name for new tag
352 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
353 :param commit_id: commit id for which new tag would be created
365 :param commit_id: commit id for which new tag would be created
354 :param message: message of the tag's commit
366 :param message: message of the tag's commit
355 :param date: date of tag's commit
367 :param date: date of tag's commit
356
368
357 :raises TagAlreadyExistError: if tag with same name already exists
369 :raises TagAlreadyExistError: if tag with same name already exists
358 """
370 """
359 if name in self.tags:
371 if name in self.tags:
360 raise TagAlreadyExistError("Tag %s already exists" % name)
372 raise TagAlreadyExistError("Tag %s already exists" % name)
361 commit = self.get_commit(commit_id=commit_id)
373 commit = self.get_commit(commit_id=commit_id)
362 message = message or "Added tag %s for commit %s" % (
374 message = message or "Added tag %s for commit %s" % (
363 name, commit.raw_id)
375 name, commit.raw_id)
364 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
365
377
366 self._refs = self._get_refs()
378 self._refs = self._get_refs()
367 self.tags = self._get_tags()
379 self.tags = self._get_tags()
368 return commit
380 return commit
369
381
370 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
371 """
383 """
372 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
373
385
374 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
377 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
378
390
379 :raises TagDoesNotExistError: if tag with given name does not exists
391 :raises TagDoesNotExistError: if tag with given name does not exists
380 """
392 """
381 if name not in self.tags:
393 if name not in self.tags:
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
383 tagpath = vcspath.join(
395 tagpath = vcspath.join(
384 self._remote.get_refs_path(), 'refs', 'tags', name)
396 self._remote.get_refs_path(), 'refs', 'tags', name)
385 try:
397 try:
386 os.remove(tagpath)
398 os.remove(tagpath)
387 self._refs = self._get_refs()
399 self._refs = self._get_refs()
388 self.tags = self._get_tags()
400 self.tags = self._get_tags()
389 except OSError as e:
401 except OSError as e:
390 raise RepositoryError(e.strerror)
402 raise RepositoryError(e.strerror)
391
403
392 def _get_refs(self):
404 def _get_refs(self):
393 return self._remote.get_refs()
405 return self._remote.get_refs()
394
406
395 @LazyProperty
407 @LazyProperty
396 def _refs(self):
408 def _refs(self):
397 return self._get_refs()
409 return self._get_refs()
398
410
399 @property
411 @property
400 def _ref_tree(self):
412 def _ref_tree(self):
401 node = tree = {}
413 node = tree = {}
402 for ref, sha in self._refs.iteritems():
414 for ref, sha in self._refs.iteritems():
403 path = ref.split('/')
415 path = ref.split('/')
404 for bit in path[:-1]:
416 for bit in path[:-1]:
405 node = node.setdefault(bit, {})
417 node = node.setdefault(bit, {})
406 node[path[-1]] = sha
418 node[path[-1]] = sha
407 node = tree
419 node = tree
408 return tree
420 return tree
409
421
410 def get_remote_ref(self, ref_name):
422 def get_remote_ref(self, ref_name):
411 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
412 try:
424 try:
413 return self._refs[ref_key]
425 return self._refs[ref_key]
414 except Exception:
426 except Exception:
415 return
427 return
416
428
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
418 """
430 """
419 Returns `GitCommit` object representing commit from git repository
431 Returns `GitCommit` object representing commit from git repository
420 at the given `commit_id` or head (most recent commit) if None given.
432 at the given `commit_id` or head (most recent commit) if None given.
421 """
433 """
422 if commit_id is not None:
434 if commit_id is not None:
423 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
424 elif commit_idx is not None:
436 elif commit_idx is not None:
425 self._validate_commit_idx(commit_idx)
437 self._validate_commit_idx(commit_idx)
426 commit_id = commit_idx
438 commit_id = commit_idx
427 commit_id = self._get_commit_id(commit_id)
439 commit_id = self._get_commit_id(commit_id)
428 try:
440 try:
429 # Need to call remote to translate id for tagging scenario
441 # Need to call remote to translate id for tagging scenario
430 commit_id = self._remote.get_object(commit_id)["commit_id"]
442 commit_id = self._remote.get_object(commit_id)["commit_id"]
431 idx = self._commit_ids[commit_id]
443 idx = self._commit_ids[commit_id]
432 except KeyError:
444 except KeyError:
433 raise RepositoryError("Cannot get object with id %s" % commit_id)
445 raise RepositoryError("Cannot get object with id %s" % commit_id)
434
446
435 return GitCommit(self, commit_id, idx, pre_load=pre_load)
447 return GitCommit(self, commit_id, idx, pre_load=pre_load)
436
448
437 def get_commits(
449 def get_commits(
438 self, start_id=None, end_id=None, start_date=None, end_date=None,
450 self, start_id=None, end_id=None, start_date=None, end_date=None,
439 branch_name=None, show_hidden=False, pre_load=None):
451 branch_name=None, show_hidden=False, pre_load=None):
440 """
452 """
441 Returns generator of `GitCommit` objects from start to end (both
453 Returns generator of `GitCommit` objects from start to end (both
442 are inclusive), in ascending date order.
454 are inclusive), in ascending date order.
443
455
444 :param start_id: None, str(commit_id)
456 :param start_id: None, str(commit_id)
445 :param end_id: None, str(commit_id)
457 :param end_id: None, str(commit_id)
446 :param start_date: if specified, commits with commit date less than
458 :param start_date: if specified, commits with commit date less than
447 ``start_date`` would be filtered out from returned set
459 ``start_date`` would be filtered out from returned set
448 :param end_date: if specified, commits with commit date greater than
460 :param end_date: if specified, commits with commit date greater than
449 ``end_date`` would be filtered out from returned set
461 ``end_date`` would be filtered out from returned set
450 :param branch_name: if specified, commits not reachable from given
462 :param branch_name: if specified, commits not reachable from given
451 branch would be filtered out from returned set
463 branch would be filtered out from returned set
452 :param show_hidden: Show hidden commits such as obsolete or hidden from
464 :param show_hidden: Show hidden commits such as obsolete or hidden from
453 Mercurial evolve
465 Mercurial evolve
454 :raise BranchDoesNotExistError: If given `branch_name` does not
466 :raise BranchDoesNotExistError: If given `branch_name` does not
455 exist.
467 exist.
456 :raise CommitDoesNotExistError: If commits for given `start` or
468 :raise CommitDoesNotExistError: If commits for given `start` or
457 `end` could not be found.
469 `end` could not be found.
458
470
459 """
471 """
460 if self.is_empty():
472 if self.is_empty():
461 raise EmptyRepositoryError("There are no commits yet")
473 raise EmptyRepositoryError("There are no commits yet")
462 self._validate_branch_name(branch_name)
474 self._validate_branch_name(branch_name)
463
475
464 if start_id is not None:
476 if start_id is not None:
465 self._validate_commit_id(start_id)
477 self._validate_commit_id(start_id)
466 if end_id is not None:
478 if end_id is not None:
467 self._validate_commit_id(end_id)
479 self._validate_commit_id(end_id)
468
480
469 start_raw_id = self._get_commit_id(start_id)
481 start_raw_id = self._get_commit_id(start_id)
470 start_pos = self._commit_ids[start_raw_id] if start_id else None
482 start_pos = self._commit_ids[start_raw_id] if start_id else None
471 end_raw_id = self._get_commit_id(end_id)
483 end_raw_id = self._get_commit_id(end_id)
472 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
484 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
473
485
474 if None not in [start_id, end_id] and start_pos > end_pos:
486 if None not in [start_id, end_id] and start_pos > end_pos:
475 raise RepositoryError(
487 raise RepositoryError(
476 "Start commit '%s' cannot be after end commit '%s'" %
488 "Start commit '%s' cannot be after end commit '%s'" %
477 (start_id, end_id))
489 (start_id, end_id))
478
490
479 if end_pos is not None:
491 if end_pos is not None:
480 end_pos += 1
492 end_pos += 1
481
493
482 filter_ = []
494 filter_ = []
483 if branch_name:
495 if branch_name:
484 filter_.append({'branch_name': branch_name})
496 filter_.append({'branch_name': branch_name})
485 if start_date and not end_date:
497 if start_date and not end_date:
486 filter_.append({'since': start_date})
498 filter_.append({'since': start_date})
487 if end_date and not start_date:
499 if end_date and not start_date:
488 filter_.append({'until': end_date})
500 filter_.append({'until': end_date})
489 if start_date and end_date:
501 if start_date and end_date:
490 filter_.append({'since': start_date})
502 filter_.append({'since': start_date})
491 filter_.append({'until': end_date})
503 filter_.append({'until': end_date})
492
504
493 # if start_pos or end_pos:
505 # if start_pos or end_pos:
494 # filter_.append({'start': start_pos})
506 # filter_.append({'start': start_pos})
495 # filter_.append({'end': end_pos})
507 # filter_.append({'end': end_pos})
496
508
497 if filter_:
509 if filter_:
498 revfilters = {
510 revfilters = {
499 'branch_name': branch_name,
511 'branch_name': branch_name,
500 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
512 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
501 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
513 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
502 'start': start_pos,
514 'start': start_pos,
503 'end': end_pos,
515 'end': end_pos,
504 }
516 }
505 commit_ids = self._get_all_commit_ids(filters=revfilters)
517 commit_ids = self._get_all_commit_ids(filters=revfilters)
506
518
507 # pure python stuff, it's slow due to walker walking whole repo
519 # pure python stuff, it's slow due to walker walking whole repo
508 # def get_revs(walker):
520 # def get_revs(walker):
509 # for walker_entry in walker:
521 # for walker_entry in walker:
510 # yield walker_entry.commit.id
522 # yield walker_entry.commit.id
511 # revfilters = {}
523 # revfilters = {}
512 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
524 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
513 else:
525 else:
514 commit_ids = self.commit_ids
526 commit_ids = self.commit_ids
515
527
516 if start_pos or end_pos:
528 if start_pos or end_pos:
517 commit_ids = commit_ids[start_pos: end_pos]
529 commit_ids = commit_ids[start_pos: end_pos]
518
530
519 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
531 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
520
532
521 def get_diff(
533 def get_diff(
522 self, commit1, commit2, path='', ignore_whitespace=False,
534 self, commit1, commit2, path='', ignore_whitespace=False,
523 context=3, path1=None):
535 context=3, path1=None):
524 """
536 """
525 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 Returns (git like) *diff*, as plain text. Shows changes introduced by
526 ``commit2`` since ``commit1``.
538 ``commit2`` since ``commit1``.
527
539
528 :param commit1: Entry point from which diff is shown. Can be
540 :param commit1: Entry point from which diff is shown. Can be
529 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
530 the changes since empty state of the repository until ``commit2``
542 the changes since empty state of the repository until ``commit2``
531 :param commit2: Until which commits changes should be shown.
543 :param commit2: Until which commits changes should be shown.
532 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 :param ignore_whitespace: If set to ``True``, would not show whitespace
533 changes. Defaults to ``False``.
545 changes. Defaults to ``False``.
534 :param context: How many lines before/after changed lines should be
546 :param context: How many lines before/after changed lines should be
535 shown. Defaults to ``3``.
547 shown. Defaults to ``3``.
536 """
548 """
537 self._validate_diff_commits(commit1, commit2)
549 self._validate_diff_commits(commit1, commit2)
538 if path1 is not None and path1 != path:
550 if path1 is not None and path1 != path:
539 raise ValueError("Diff of two different paths not supported.")
551 raise ValueError("Diff of two different paths not supported.")
540
552
541 flags = [
553 flags = [
542 '-U%s' % context, '--full-index', '--binary', '-p',
554 '-U%s' % context, '--full-index', '--binary', '-p',
543 '-M', '--abbrev=40']
555 '-M', '--abbrev=40']
544 if ignore_whitespace:
556 if ignore_whitespace:
545 flags.append('-w')
557 flags.append('-w')
546
558
547 if commit1 == self.EMPTY_COMMIT:
559 if commit1 == self.EMPTY_COMMIT:
548 cmd = ['show'] + flags + [commit2.raw_id]
560 cmd = ['show'] + flags + [commit2.raw_id]
549 else:
561 else:
550 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
551
563
552 if path:
564 if path:
553 cmd.extend(['--', path])
565 cmd.extend(['--', path])
554
566
555 stdout, __ = self.run_git_command(cmd)
567 stdout, __ = self.run_git_command(cmd)
556 # If we used 'show' command, strip first few lines (until actual diff
568 # If we used 'show' command, strip first few lines (until actual diff
557 # starts)
569 # starts)
558 if commit1 == self.EMPTY_COMMIT:
570 if commit1 == self.EMPTY_COMMIT:
559 lines = stdout.splitlines()
571 lines = stdout.splitlines()
560 x = 0
572 x = 0
561 for line in lines:
573 for line in lines:
562 if line.startswith('diff'):
574 if line.startswith('diff'):
563 break
575 break
564 x += 1
576 x += 1
565 # Append new line just like 'diff' command do
577 # Append new line just like 'diff' command do
566 stdout = '\n'.join(lines[x:]) + '\n'
578 stdout = '\n'.join(lines[x:]) + '\n'
567 return GitDiff(stdout)
579 return GitDiff(stdout)
568
580
569 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
570 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
571 if commit.merge:
583 if commit.merge:
572 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
573
585
574 # parent is going to be the new head now
586 # parent is going to be the new head now
575 commit = commit.parents[0]
587 commit = commit.parents[0]
576 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
577
589
578 self.commit_ids = self._get_all_commit_ids()
590 self.commit_ids = self._get_all_commit_ids()
579 self._rebuild_cache(self.commit_ids)
591 self._rebuild_cache(self.commit_ids)
580
592
581 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
582 if commit_id1 == commit_id2:
594 if commit_id1 == commit_id2:
583 return commit_id1
595 return commit_id1
584
596
585 if self != repo2:
597 if self != repo2:
586 commits = self._remote.get_missing_revs(
598 commits = self._remote.get_missing_revs(
587 commit_id1, commit_id2, repo2.path)
599 commit_id1, commit_id2, repo2.path)
588 if commits:
600 if commits:
589 commit = repo2.get_commit(commits[-1])
601 commit = repo2.get_commit(commits[-1])
590 if commit.parents:
602 if commit.parents:
591 ancestor_id = commit.parents[0].raw_id
603 ancestor_id = commit.parents[0].raw_id
592 else:
604 else:
593 ancestor_id = None
605 ancestor_id = None
594 else:
606 else:
595 # no commits from other repo, ancestor_id is the commit_id2
607 # no commits from other repo, ancestor_id is the commit_id2
596 ancestor_id = commit_id2
608 ancestor_id = commit_id2
597 else:
609 else:
598 output, __ = self.run_git_command(
610 output, __ = self.run_git_command(
599 ['merge-base', commit_id1, commit_id2])
611 ['merge-base', commit_id1, commit_id2])
600 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
601
613
602 return ancestor_id
614 return ancestor_id
603
615
604 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
605 repo1 = self
617 repo1 = self
606 ancestor_id = None
618 ancestor_id = None
607
619
608 if commit_id1 == commit_id2:
620 if commit_id1 == commit_id2:
609 commits = []
621 commits = []
610 elif repo1 != repo2:
622 elif repo1 != repo2:
611 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
612 repo2.path)
624 repo2.path)
613 commits = [
625 commits = [
614 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
615 for commit_id in reversed(missing_ids)]
627 for commit_id in reversed(missing_ids)]
616 else:
628 else:
617 output, __ = repo1.run_git_command(
629 output, __ = repo1.run_git_command(
618 ['log', '--reverse', '--pretty=format: %H', '-s',
630 ['log', '--reverse', '--pretty=format: %H', '-s',
619 '%s..%s' % (commit_id1, commit_id2)])
631 '%s..%s' % (commit_id1, commit_id2)])
620 commits = [
632 commits = [
621 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
622 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
623
635
624 return commits
636 return commits
625
637
626 @LazyProperty
638 @LazyProperty
627 def in_memory_commit(self):
639 def in_memory_commit(self):
628 """
640 """
629 Returns ``GitInMemoryCommit`` object for this repository.
641 Returns ``GitInMemoryCommit`` object for this repository.
630 """
642 """
631 return GitInMemoryCommit(self)
643 return GitInMemoryCommit(self)
632
644
633 def clone(self, url, update_after_clone=True, bare=False):
645 def pull(self, url, commit_ids=None, update_after=False):
634 """
646 """
635 Tries to clone commits from external location.
647 Pull changes from external location. Pull is different in GIT
636
648 that fetch since it's doing a checkout
637 :param update_after_clone: If set to ``False``, git won't checkout
638 working directory
639 :param bare: If set to ``True``, repository would be cloned into
640 *bare* git repository (no working directory at all).
641 """
642 # init_bare and init expect empty dir created to proceed
643 if not os.path.exists(self.path):
644 os.mkdir(self.path)
645
649
646 if bare:
650 :param commit_ids: Optional. Can be set to a list of commit ids
647 self._remote.init_bare()
651 which shall be pulled from the other repository.
648 else:
649 self._remote.init()
650
651 deferred = '^{}'
652 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
653
654 return self._remote.clone(
655 url, deferred, valid_refs, update_after_clone)
656
657 def pull(self, url, commit_ids=None):
658 """
652 """
659 Tries to pull changes from external location. We use fetch here since
653 refs = None
660 pull in get does merges and we want to be compatible with hg backend so
654 if commit_ids is not None:
661 pull == fetch in this case
655 remote_refs = self._remote.get_remote_refs(url)
662 """
656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
663 self.fetch(url, commit_ids=commit_ids)
657 self._remote.pull(url, refs=refs, update_after=update_after)
658 self._remote.invalidate_vcs_cache()
664
659
665 def fetch(self, url, commit_ids=None):
660 def fetch(self, url, commit_ids=None):
666 """
661 """
667 Tries to fetch changes from external location.
662 Fetch all git objects from external location.
668 """
663 """
669 refs = None
664 self._remote.sync_fetch(url, refs=commit_ids)
670
665 self._remote.invalidate_vcs_cache()
671 if commit_ids is not None:
672 remote_refs = self._remote.get_remote_refs(url)
673 refs = [
674 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
675 self._remote.fetch(url, refs=refs)
676
666
677 def push(self, url):
667 def push(self, url):
678 refs = None
668 refs = None
679 self._remote.sync_push(url, refs=refs)
669 self._remote.sync_push(url, refs=refs)
680
670
681 def set_refs(self, ref_name, commit_id):
671 def set_refs(self, ref_name, commit_id):
682 self._remote.set_refs(ref_name, commit_id)
672 self._remote.set_refs(ref_name, commit_id)
683
673
684 def remove_ref(self, ref_name):
674 def remove_ref(self, ref_name):
685 self._remote.remove_ref(ref_name)
675 self._remote.remove_ref(ref_name)
686
676
687 def _update_server_info(self):
677 def _update_server_info(self):
688 """
678 """
689 runs gits update-server-info command in this repo instance
679 runs gits update-server-info command in this repo instance
690 """
680 """
691 self._remote.update_server_info()
681 self._remote.update_server_info()
692
682
693 def _current_branch(self):
683 def _current_branch(self):
694 """
684 """
695 Return the name of the current branch.
685 Return the name of the current branch.
696
686
697 It only works for non bare repositories (i.e. repositories with a
687 It only works for non bare repositories (i.e. repositories with a
698 working copy)
688 working copy)
699 """
689 """
700 if self.bare:
690 if self.bare:
701 raise RepositoryError('Bare git repos do not have active branches')
691 raise RepositoryError('Bare git repos do not have active branches')
702
692
703 if self.is_empty():
693 if self.is_empty():
704 return None
694 return None
705
695
706 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
707 return stdout.strip()
697 return stdout.strip()
708
698
709 def _checkout(self, branch_name, create=False, force=False):
699 def _checkout(self, branch_name, create=False, force=False):
710 """
700 """
711 Checkout a branch in the working directory.
701 Checkout a branch in the working directory.
712
702
713 It tries to create the branch if create is True, failing if the branch
703 It tries to create the branch if create is True, failing if the branch
714 already exists.
704 already exists.
715
705
716 It only works for non bare repositories (i.e. repositories with a
706 It only works for non bare repositories (i.e. repositories with a
717 working copy)
707 working copy)
718 """
708 """
719 if self.bare:
709 if self.bare:
720 raise RepositoryError('Cannot checkout branches in a bare git repo')
710 raise RepositoryError('Cannot checkout branches in a bare git repo')
721
711
722 cmd = ['checkout']
712 cmd = ['checkout']
723 if force:
713 if force:
724 cmd.append('-f')
714 cmd.append('-f')
725 if create:
715 if create:
726 cmd.append('-b')
716 cmd.append('-b')
727 cmd.append(branch_name)
717 cmd.append(branch_name)
728 self.run_git_command(cmd, fail_on_stderr=False)
718 self.run_git_command(cmd, fail_on_stderr=False)
729
719
730 def _identify(self):
720 def _identify(self):
731 """
721 """
732 Return the current state of the working directory.
722 Return the current state of the working directory.
733 """
723 """
734 if self.bare:
724 if self.bare:
735 raise RepositoryError('Bare git repos do not have active branches')
725 raise RepositoryError('Bare git repos do not have active branches')
736
726
737 if self.is_empty():
727 if self.is_empty():
738 return None
728 return None
739
729
740 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
741 return stdout.strip()
731 return stdout.strip()
742
732
743 def _local_clone(self, clone_path, branch_name, source_branch=None):
733 def _local_clone(self, clone_path, branch_name, source_branch=None):
744 """
734 """
745 Create a local clone of the current repo.
735 Create a local clone of the current repo.
746 """
736 """
747 # N.B.(skreft): the --branch option is required as otherwise the shallow
737 # N.B.(skreft): the --branch option is required as otherwise the shallow
748 # clone will only fetch the active branch.
738 # clone will only fetch the active branch.
749 cmd = ['clone', '--branch', branch_name,
739 cmd = ['clone', '--branch', branch_name,
750 self.path, os.path.abspath(clone_path)]
740 self.path, os.path.abspath(clone_path)]
751
741
752 self.run_git_command(cmd, fail_on_stderr=False)
742 self.run_git_command(cmd, fail_on_stderr=False)
753
743
754 # if we get the different source branch, make sure we also fetch it for
744 # if we get the different source branch, make sure we also fetch it for
755 # merge conditions
745 # merge conditions
756 if source_branch and source_branch != branch_name:
746 if source_branch and source_branch != branch_name:
757 # check if the ref exists.
747 # check if the ref exists.
758 shadow_repo = GitRepository(os.path.abspath(clone_path))
748 shadow_repo = GitRepository(os.path.abspath(clone_path))
759 if shadow_repo.get_remote_ref(source_branch):
749 if shadow_repo.get_remote_ref(source_branch):
760 cmd = ['fetch', self.path, source_branch]
750 cmd = ['fetch', self.path, source_branch]
761 self.run_git_command(cmd, fail_on_stderr=False)
751 self.run_git_command(cmd, fail_on_stderr=False)
762
752
763 def _local_fetch(self, repository_path, branch_name, use_origin=False):
753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
764 """
754 """
765 Fetch a branch from a local repository.
755 Fetch a branch from a local repository.
766 """
756 """
767 repository_path = os.path.abspath(repository_path)
757 repository_path = os.path.abspath(repository_path)
768 if repository_path == self.path:
758 if repository_path == self.path:
769 raise ValueError('Cannot fetch from the same repository')
759 raise ValueError('Cannot fetch from the same repository')
770
760
771 if use_origin:
761 if use_origin:
772 branch_name = '+{branch}:refs/heads/{branch}'.format(
762 branch_name = '+{branch}:refs/heads/{branch}'.format(
773 branch=branch_name)
763 branch=branch_name)
774
764
775 cmd = ['fetch', '--no-tags', '--update-head-ok',
765 cmd = ['fetch', '--no-tags', '--update-head-ok',
776 repository_path, branch_name]
766 repository_path, branch_name]
777 self.run_git_command(cmd, fail_on_stderr=False)
767 self.run_git_command(cmd, fail_on_stderr=False)
778
768
779 def _local_reset(self, branch_name):
769 def _local_reset(self, branch_name):
780 branch_name = '{}'.format(branch_name)
770 branch_name = '{}'.format(branch_name)
781 cmd = ['reset', '--hard', branch_name]
771 cmd = ['reset', '--hard', branch_name]
782 self.run_git_command(cmd, fail_on_stderr=False)
772 self.run_git_command(cmd, fail_on_stderr=False)
783
773
784 def _last_fetch_heads(self):
774 def _last_fetch_heads(self):
785 """
775 """
786 Return the last fetched heads that need merging.
776 Return the last fetched heads that need merging.
787
777
788 The algorithm is defined at
778 The algorithm is defined at
789 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
790 """
780 """
791 if not self.bare:
781 if not self.bare:
792 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
793 else:
783 else:
794 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
795
785
796 heads = []
786 heads = []
797 with open(fetch_heads_path) as f:
787 with open(fetch_heads_path) as f:
798 for line in f:
788 for line in f:
799 if ' not-for-merge ' in line:
789 if ' not-for-merge ' in line:
800 continue
790 continue
801 line = re.sub('\t.*', '', line, flags=re.DOTALL)
791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
802 heads.append(line)
792 heads.append(line)
803
793
804 return heads
794 return heads
805
795
806 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
807 return GitRepository(shadow_repository_path)
797 return GitRepository(shadow_repository_path)
808
798
809 def _local_pull(self, repository_path, branch_name, ff_only=True):
799 def _local_pull(self, repository_path, branch_name, ff_only=True):
810 """
800 """
811 Pull a branch from a local repository.
801 Pull a branch from a local repository.
812 """
802 """
813 if self.bare:
803 if self.bare:
814 raise RepositoryError('Cannot pull into a bare git repository')
804 raise RepositoryError('Cannot pull into a bare git repository')
815 # N.B.(skreft): The --ff-only option is to make sure this is a
805 # N.B.(skreft): The --ff-only option is to make sure this is a
816 # fast-forward (i.e., we are only pulling new changes and there are no
806 # fast-forward (i.e., we are only pulling new changes and there are no
817 # conflicts with our current branch)
807 # conflicts with our current branch)
818 # Additionally, that option needs to go before --no-tags, otherwise git
808 # Additionally, that option needs to go before --no-tags, otherwise git
819 # pull complains about it being an unknown flag.
809 # pull complains about it being an unknown flag.
820 cmd = ['pull']
810 cmd = ['pull']
821 if ff_only:
811 if ff_only:
822 cmd.append('--ff-only')
812 cmd.append('--ff-only')
823 cmd.extend(['--no-tags', repository_path, branch_name])
813 cmd.extend(['--no-tags', repository_path, branch_name])
824 self.run_git_command(cmd, fail_on_stderr=False)
814 self.run_git_command(cmd, fail_on_stderr=False)
825
815
826 def _local_merge(self, merge_message, user_name, user_email, heads):
816 def _local_merge(self, merge_message, user_name, user_email, heads):
827 """
817 """
828 Merge the given head into the checked out branch.
818 Merge the given head into the checked out branch.
829
819
830 It will force a merge commit.
820 It will force a merge commit.
831
821
832 Currently it raises an error if the repo is empty, as it is not possible
822 Currently it raises an error if the repo is empty, as it is not possible
833 to create a merge commit in an empty repo.
823 to create a merge commit in an empty repo.
834
824
835 :param merge_message: The message to use for the merge commit.
825 :param merge_message: The message to use for the merge commit.
836 :param heads: the heads to merge.
826 :param heads: the heads to merge.
837 """
827 """
838 if self.bare:
828 if self.bare:
839 raise RepositoryError('Cannot merge into a bare git repository')
829 raise RepositoryError('Cannot merge into a bare git repository')
840
830
841 if not heads:
831 if not heads:
842 return
832 return
843
833
844 if self.is_empty():
834 if self.is_empty():
845 # TODO(skreft): do somehting more robust in this case.
835 # TODO(skreft): do somehting more robust in this case.
846 raise RepositoryError(
836 raise RepositoryError(
847 'Do not know how to merge into empty repositories yet')
837 'Do not know how to merge into empty repositories yet')
848
838
849 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
850 # commit message. We also specify the user who is doing the merge.
840 # commit message. We also specify the user who is doing the merge.
851 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
852 '-c', 'user.email=%s' % safe_str(user_email),
842 '-c', 'user.email=%s' % safe_str(user_email),
853 'merge', '--no-ff', '-m', safe_str(merge_message)]
843 'merge', '--no-ff', '-m', safe_str(merge_message)]
854 cmd.extend(heads)
844 cmd.extend(heads)
855 try:
845 try:
856 output = self.run_git_command(cmd, fail_on_stderr=False)
846 output = self.run_git_command(cmd, fail_on_stderr=False)
857 except RepositoryError:
847 except RepositoryError:
858 # Cleanup any merge leftovers
848 # Cleanup any merge leftovers
859 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
860 raise
850 raise
861
851
862 def _local_push(
852 def _local_push(
863 self, source_branch, repository_path, target_branch,
853 self, source_branch, repository_path, target_branch,
864 enable_hooks=False, rc_scm_data=None):
854 enable_hooks=False, rc_scm_data=None):
865 """
855 """
866 Push the source_branch to the given repository and target_branch.
856 Push the source_branch to the given repository and target_branch.
867
857
868 Currently it if the target_branch is not master and the target repo is
858 Currently it if the target_branch is not master and the target repo is
869 empty, the push will work, but then GitRepository won't be able to find
859 empty, the push will work, but then GitRepository won't be able to find
870 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
860 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
871 pointing to master, which does not exist).
861 pointing to master, which does not exist).
872
862
873 It does not run the hooks in the target repo.
863 It does not run the hooks in the target repo.
874 """
864 """
875 # TODO(skreft): deal with the case in which the target repo is empty,
865 # TODO(skreft): deal with the case in which the target repo is empty,
876 # and the target_branch is not master.
866 # and the target_branch is not master.
877 target_repo = GitRepository(repository_path)
867 target_repo = GitRepository(repository_path)
878 if (not target_repo.bare and
868 if (not target_repo.bare and
879 target_repo._current_branch() == target_branch):
869 target_repo._current_branch() == target_branch):
880 # Git prevents pushing to the checked out branch, so simulate it by
870 # Git prevents pushing to the checked out branch, so simulate it by
881 # pulling into the target repository.
871 # pulling into the target repository.
882 target_repo._local_pull(self.path, source_branch)
872 target_repo._local_pull(self.path, source_branch)
883 else:
873 else:
884 cmd = ['push', os.path.abspath(repository_path),
874 cmd = ['push', os.path.abspath(repository_path),
885 '%s:%s' % (source_branch, target_branch)]
875 '%s:%s' % (source_branch, target_branch)]
886 gitenv = {}
876 gitenv = {}
887 if rc_scm_data:
877 if rc_scm_data:
888 gitenv.update({'RC_SCM_DATA': rc_scm_data})
878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
889
879
890 if not enable_hooks:
880 if not enable_hooks:
891 gitenv['RC_SKIP_HOOKS'] = '1'
881 gitenv['RC_SKIP_HOOKS'] = '1'
892 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
893
883
894 def _get_new_pr_branch(self, source_branch, target_branch):
884 def _get_new_pr_branch(self, source_branch, target_branch):
895 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
896 pr_branches = []
886 pr_branches = []
897 for branch in self.branches:
887 for branch in self.branches:
898 if branch.startswith(prefix):
888 if branch.startswith(prefix):
899 pr_branches.append(int(branch[len(prefix):]))
889 pr_branches.append(int(branch[len(prefix):]))
900
890
901 if not pr_branches:
891 if not pr_branches:
902 branch_id = 0
892 branch_id = 0
903 else:
893 else:
904 branch_id = max(pr_branches) + 1
894 branch_id = max(pr_branches) + 1
905
895
906 return '%s%d' % (prefix, branch_id)
896 return '%s%d' % (prefix, branch_id)
907
897
908 def _maybe_prepare_merge_workspace(
898 def _maybe_prepare_merge_workspace(
909 self, repo_id, workspace_id, target_ref, source_ref):
899 self, repo_id, workspace_id, target_ref, source_ref):
910 shadow_repository_path = self._get_shadow_repository_path(
900 shadow_repository_path = self._get_shadow_repository_path(
911 repo_id, workspace_id)
901 repo_id, workspace_id)
912 if not os.path.exists(shadow_repository_path):
902 if not os.path.exists(shadow_repository_path):
913 self._local_clone(
903 self._local_clone(
914 shadow_repository_path, target_ref.name, source_ref.name)
904 shadow_repository_path, target_ref.name, source_ref.name)
915 log.debug(
905 log.debug(
916 'Prepared shadow repository in %s', shadow_repository_path)
906 'Prepared shadow repository in %s', shadow_repository_path)
917
907
918 return shadow_repository_path
908 return shadow_repository_path
919
909
920 def _merge_repo(self, repo_id, workspace_id, target_ref,
910 def _merge_repo(self, repo_id, workspace_id, target_ref,
921 source_repo, source_ref, merge_message,
911 source_repo, source_ref, merge_message,
922 merger_name, merger_email, dry_run=False,
912 merger_name, merger_email, dry_run=False,
923 use_rebase=False, close_branch=False):
913 use_rebase=False, close_branch=False):
924 if target_ref.commit_id != self.branches[target_ref.name]:
914 if target_ref.commit_id != self.branches[target_ref.name]:
925 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
915 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
926 target_ref.commit_id, self.branches[target_ref.name])
916 target_ref.commit_id, self.branches[target_ref.name])
927 return MergeResponse(
917 return MergeResponse(
928 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
918 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
929
919
930 shadow_repository_path = self._maybe_prepare_merge_workspace(
920 shadow_repository_path = self._maybe_prepare_merge_workspace(
931 repo_id, workspace_id, target_ref, source_ref)
921 repo_id, workspace_id, target_ref, source_ref)
932 shadow_repo = self._get_shadow_instance(shadow_repository_path)
922 shadow_repo = self._get_shadow_instance(shadow_repository_path)
933
923
934 # checkout source, if it's different. Otherwise we could not
924 # checkout source, if it's different. Otherwise we could not
935 # fetch proper commits for merge testing
925 # fetch proper commits for merge testing
936 if source_ref.name != target_ref.name:
926 if source_ref.name != target_ref.name:
937 if shadow_repo.get_remote_ref(source_ref.name):
927 if shadow_repo.get_remote_ref(source_ref.name):
938 shadow_repo._checkout(source_ref.name, force=True)
928 shadow_repo._checkout(source_ref.name, force=True)
939
929
940 # checkout target, and fetch changes
930 # checkout target, and fetch changes
941 shadow_repo._checkout(target_ref.name, force=True)
931 shadow_repo._checkout(target_ref.name, force=True)
942
932
943 # fetch/reset pull the target, in case it is changed
933 # fetch/reset pull the target, in case it is changed
944 # this handles even force changes
934 # this handles even force changes
945 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
935 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
946 shadow_repo._local_reset(target_ref.name)
936 shadow_repo._local_reset(target_ref.name)
947
937
948 # Need to reload repo to invalidate the cache, or otherwise we cannot
938 # Need to reload repo to invalidate the cache, or otherwise we cannot
949 # retrieve the last target commit.
939 # retrieve the last target commit.
950 shadow_repo = self._get_shadow_instance(shadow_repository_path)
940 shadow_repo = self._get_shadow_instance(shadow_repository_path)
951 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
941 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
952 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
942 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
953 target_ref, target_ref.commit_id,
943 target_ref, target_ref.commit_id,
954 shadow_repo.branches[target_ref.name])
944 shadow_repo.branches[target_ref.name])
955 return MergeResponse(
945 return MergeResponse(
956 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
946 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
957
947
958 # calculate new branch
948 # calculate new branch
959 pr_branch = shadow_repo._get_new_pr_branch(
949 pr_branch = shadow_repo._get_new_pr_branch(
960 source_ref.name, target_ref.name)
950 source_ref.name, target_ref.name)
961 log.debug('using pull-request merge branch: `%s`', pr_branch)
951 log.debug('using pull-request merge branch: `%s`', pr_branch)
962 # checkout to temp branch, and fetch changes
952 # checkout to temp branch, and fetch changes
963 shadow_repo._checkout(pr_branch, create=True)
953 shadow_repo._checkout(pr_branch, create=True)
964 try:
954 try:
965 shadow_repo._local_fetch(source_repo.path, source_ref.name)
955 shadow_repo._local_fetch(source_repo.path, source_ref.name)
966 except RepositoryError:
956 except RepositoryError:
967 log.exception('Failure when doing local fetch on git shadow repo')
957 log.exception('Failure when doing local fetch on git shadow repo')
968 return MergeResponse(
958 return MergeResponse(
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
970
960
971 merge_ref = None
961 merge_ref = None
972 merge_failure_reason = MergeFailureReason.NONE
962 merge_failure_reason = MergeFailureReason.NONE
973 try:
963 try:
974 shadow_repo._local_merge(merge_message, merger_name, merger_email,
964 shadow_repo._local_merge(merge_message, merger_name, merger_email,
975 [source_ref.commit_id])
965 [source_ref.commit_id])
976 merge_possible = True
966 merge_possible = True
977
967
978 # Need to reload repo to invalidate the cache, or otherwise we
968 # Need to reload repo to invalidate the cache, or otherwise we
979 # cannot retrieve the merge commit.
969 # cannot retrieve the merge commit.
980 shadow_repo = GitRepository(shadow_repository_path)
970 shadow_repo = GitRepository(shadow_repository_path)
981 merge_commit_id = shadow_repo.branches[pr_branch]
971 merge_commit_id = shadow_repo.branches[pr_branch]
982
972
983 # Set a reference pointing to the merge commit. This reference may
973 # Set a reference pointing to the merge commit. This reference may
984 # be used to easily identify the last successful merge commit in
974 # be used to easily identify the last successful merge commit in
985 # the shadow repository.
975 # the shadow repository.
986 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
976 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
987 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
977 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
988 except RepositoryError:
978 except RepositoryError:
989 log.exception('Failure when doing local merge on git shadow repo')
979 log.exception('Failure when doing local merge on git shadow repo')
990 merge_possible = False
980 merge_possible = False
991 merge_failure_reason = MergeFailureReason.MERGE_FAILED
981 merge_failure_reason = MergeFailureReason.MERGE_FAILED
992
982
993 if merge_possible and not dry_run:
983 if merge_possible and not dry_run:
994 try:
984 try:
995 shadow_repo._local_push(
985 shadow_repo._local_push(
996 pr_branch, self.path, target_ref.name, enable_hooks=True,
986 pr_branch, self.path, target_ref.name, enable_hooks=True,
997 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
987 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
998 merge_succeeded = True
988 merge_succeeded = True
999 except RepositoryError:
989 except RepositoryError:
1000 log.exception(
990 log.exception(
1001 'Failure when doing local push on git shadow repo')
991 'Failure when doing local push on git shadow repo')
1002 merge_succeeded = False
992 merge_succeeded = False
1003 merge_failure_reason = MergeFailureReason.PUSH_FAILED
993 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1004 else:
994 else:
1005 merge_succeeded = False
995 merge_succeeded = False
1006
996
1007 return MergeResponse(
997 return MergeResponse(
1008 merge_possible, merge_succeeded, merge_ref,
998 merge_possible, merge_succeeded, merge_ref,
1009 merge_failure_reason)
999 merge_failure_reason)
@@ -1,917 +1,924 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 update_after_clone=False, with_wire=None):
60 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param update_after_clone=False: sets update of working copy after
70 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
71 making a clone
72 :param bare: not used, compatible with other VCS
72 """
73 """
73
74
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
75 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
76 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
77 # special requirements
78 # special requirements
78 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
79 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
80 self.with_wire = with_wire
81 self.with_wire = with_wire
81
82
82 self._init_repo(create, src_url, update_after_clone)
83 self._init_repo(create, src_url, do_workspace_checkout)
83
84
84 # caches
85 # caches
85 self._commit_ids = {}
86 self._commit_ids = {}
86
87
87 @LazyProperty
88 @LazyProperty
88 def _remote(self):
89 def _remote(self):
89 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90
91
91 @LazyProperty
92 @LazyProperty
92 def commit_ids(self):
93 def commit_ids(self):
93 """
94 """
94 Returns list of commit ids, in ascending order. Being lazy
95 Returns list of commit ids, in ascending order. Being lazy
95 attribute allows external tools to inject shas from cache.
96 attribute allows external tools to inject shas from cache.
96 """
97 """
97 commit_ids = self._get_all_commit_ids()
98 commit_ids = self._get_all_commit_ids()
98 self._rebuild_cache(commit_ids)
99 self._rebuild_cache(commit_ids)
99 return commit_ids
100 return commit_ids
100
101
101 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
102 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
103 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
104
105
105 @LazyProperty
106 @LazyProperty
106 def branches(self):
107 def branches(self):
107 return self._get_branches()
108 return self._get_branches()
108
109
109 @LazyProperty
110 @LazyProperty
110 def branches_closed(self):
111 def branches_closed(self):
111 return self._get_branches(active=False, closed=True)
112 return self._get_branches(active=False, closed=True)
112
113
113 @LazyProperty
114 @LazyProperty
114 def branches_all(self):
115 def branches_all(self):
115 all_branches = {}
116 all_branches = {}
116 all_branches.update(self.branches)
117 all_branches.update(self.branches)
117 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
118 return all_branches
119 return all_branches
119
120
120 def _get_branches(self, active=True, closed=False):
121 def _get_branches(self, active=True, closed=False):
121 """
122 """
122 Gets branches for this repository
123 Gets branches for this repository
123 Returns only not closed active branches by default
124 Returns only not closed active branches by default
124
125
125 :param active: return also active branches
126 :param active: return also active branches
126 :param closed: return also closed branches
127 :param closed: return also closed branches
127
128
128 """
129 """
129 if self.is_empty():
130 if self.is_empty():
130 return {}
131 return {}
131
132
132 def get_name(ctx):
133 def get_name(ctx):
133 return ctx[0]
134 return ctx[0]
134
135
135 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 self._remote.branches(active, closed).items()]
137 self._remote.branches(active, closed).items()]
137
138
138 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139
140
140 @LazyProperty
141 @LazyProperty
141 def tags(self):
142 def tags(self):
142 """
143 """
143 Gets tags for this repository
144 Gets tags for this repository
144 """
145 """
145 return self._get_tags()
146 return self._get_tags()
146
147
147 def _get_tags(self):
148 def _get_tags(self):
148 if self.is_empty():
149 if self.is_empty():
149 return {}
150 return {}
150
151
151 def get_name(ctx):
152 def get_name(ctx):
152 return ctx[0]
153 return ctx[0]
153
154
154 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
155 self._remote.tags().items()]
156 self._remote.tags().items()]
156
157
157 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
158
159
159 def tag(self, name, user, commit_id=None, message=None, date=None,
160 def tag(self, name, user, commit_id=None, message=None, date=None,
160 **kwargs):
161 **kwargs):
161 """
162 """
162 Creates and returns a tag for the given ``commit_id``.
163 Creates and returns a tag for the given ``commit_id``.
163
164
164 :param name: name for new tag
165 :param name: name for new tag
165 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
166 :param commit_id: commit id for which new tag would be created
167 :param commit_id: commit id for which new tag would be created
167 :param message: message of the tag's commit
168 :param message: message of the tag's commit
168 :param date: date of tag's commit
169 :param date: date of tag's commit
169
170
170 :raises TagAlreadyExistError: if tag with same name already exists
171 :raises TagAlreadyExistError: if tag with same name already exists
171 """
172 """
172 if name in self.tags:
173 if name in self.tags:
173 raise TagAlreadyExistError("Tag %s already exists" % name)
174 raise TagAlreadyExistError("Tag %s already exists" % name)
174 commit = self.get_commit(commit_id=commit_id)
175 commit = self.get_commit(commit_id=commit_id)
175 local = kwargs.setdefault('local', False)
176 local = kwargs.setdefault('local', False)
176
177
177 if message is None:
178 if message is None:
178 message = "Added tag %s for commit %s" % (name, commit.short_id)
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
179
180
180 date, tz = date_to_timestamp_plus_offset(date)
181 date, tz = date_to_timestamp_plus_offset(date)
181
182
182 self._remote.tag(
183 self._remote.tag(
183 name, commit.raw_id, message, local, user, date, tz)
184 name, commit.raw_id, message, local, user, date, tz)
184 self._remote.invalidate_vcs_cache()
185 self._remote.invalidate_vcs_cache()
185
186
186 # Reinitialize tags
187 # Reinitialize tags
187 self.tags = self._get_tags()
188 self.tags = self._get_tags()
188 tag_id = self.tags[name]
189 tag_id = self.tags[name]
189
190
190 return self.get_commit(commit_id=tag_id)
191 return self.get_commit(commit_id=tag_id)
191
192
192 def remove_tag(self, name, user, message=None, date=None):
193 def remove_tag(self, name, user, message=None, date=None):
193 """
194 """
194 Removes tag with the given `name`.
195 Removes tag with the given `name`.
195
196
196 :param name: name of the tag to be removed
197 :param name: name of the tag to be removed
197 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
198 :param message: message of the tag's removal commit
199 :param message: message of the tag's removal commit
199 :param date: date of tag's removal commit
200 :param date: date of tag's removal commit
200
201
201 :raises TagDoesNotExistError: if tag with given name does not exists
202 :raises TagDoesNotExistError: if tag with given name does not exists
202 """
203 """
203 if name not in self.tags:
204 if name not in self.tags:
204 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
205 if message is None:
206 if message is None:
206 message = "Removed tag %s" % name
207 message = "Removed tag %s" % name
207 local = False
208 local = False
208
209
209 date, tz = date_to_timestamp_plus_offset(date)
210 date, tz = date_to_timestamp_plus_offset(date)
210
211
211 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.tag(name, nullid, message, local, user, date, tz)
212 self._remote.invalidate_vcs_cache()
213 self._remote.invalidate_vcs_cache()
213 self.tags = self._get_tags()
214 self.tags = self._get_tags()
214
215
215 @LazyProperty
216 @LazyProperty
216 def bookmarks(self):
217 def bookmarks(self):
217 """
218 """
218 Gets bookmarks for this repository
219 Gets bookmarks for this repository
219 """
220 """
220 return self._get_bookmarks()
221 return self._get_bookmarks()
221
222
222 def _get_bookmarks(self):
223 def _get_bookmarks(self):
223 if self.is_empty():
224 if self.is_empty():
224 return {}
225 return {}
225
226
226 def get_name(ctx):
227 def get_name(ctx):
227 return ctx[0]
228 return ctx[0]
228
229
229 _bookmarks = [
230 _bookmarks = [
230 (safe_unicode(n), hexlify(h)) for n, h in
231 (safe_unicode(n), hexlify(h)) for n, h in
231 self._remote.bookmarks().items()]
232 self._remote.bookmarks().items()]
232
233
233 return OrderedDict(sorted(_bookmarks, key=get_name))
234 return OrderedDict(sorted(_bookmarks, key=get_name))
234
235
235 def _get_all_commit_ids(self):
236 def _get_all_commit_ids(self):
236 return self._remote.get_all_commit_ids('visible')
237 return self._remote.get_all_commit_ids('visible')
237
238
238 def get_diff(
239 def get_diff(
239 self, commit1, commit2, path='', ignore_whitespace=False,
240 self, commit1, commit2, path='', ignore_whitespace=False,
240 context=3, path1=None):
241 context=3, path1=None):
241 """
242 """
242 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 `commit2` since `commit1`.
244 `commit2` since `commit1`.
244
245
245 :param commit1: Entry point from which diff is shown. Can be
246 :param commit1: Entry point from which diff is shown. Can be
246 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 the changes since empty state of the repository until `commit2`
248 the changes since empty state of the repository until `commit2`
248 :param commit2: Until which commit changes should be shown.
249 :param commit2: Until which commit changes should be shown.
249 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 changes. Defaults to ``False``.
251 changes. Defaults to ``False``.
251 :param context: How many lines before/after changed lines should be
252 :param context: How many lines before/after changed lines should be
252 shown. Defaults to ``3``.
253 shown. Defaults to ``3``.
253 """
254 """
254 self._validate_diff_commits(commit1, commit2)
255 self._validate_diff_commits(commit1, commit2)
255 if path1 is not None and path1 != path:
256 if path1 is not None and path1 != path:
256 raise ValueError("Diff of two different paths not supported.")
257 raise ValueError("Diff of two different paths not supported.")
257
258
258 if path:
259 if path:
259 file_filter = [self.path, path]
260 file_filter = [self.path, path]
260 else:
261 else:
261 file_filter = None
262 file_filter = None
262
263
263 diff = self._remote.diff(
264 diff = self._remote.diff(
264 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 opt_git=True, opt_ignorews=ignore_whitespace,
266 opt_git=True, opt_ignorews=ignore_whitespace,
266 context=context)
267 context=context)
267 return MercurialDiff(diff)
268 return MercurialDiff(diff)
268
269
269 def strip(self, commit_id, branch=None):
270 def strip(self, commit_id, branch=None):
270 self._remote.strip(commit_id, update=False, backup="none")
271 self._remote.strip(commit_id, update=False, backup="none")
271
272
272 self._remote.invalidate_vcs_cache()
273 self._remote.invalidate_vcs_cache()
273 self.commit_ids = self._get_all_commit_ids()
274 self.commit_ids = self._get_all_commit_ids()
274 self._rebuild_cache(self.commit_ids)
275 self._rebuild_cache(self.commit_ids)
275
276
276 def verify(self):
277 def verify(self):
277 verify = self._remote.verify()
278 verify = self._remote.verify()
278
279
279 self._remote.invalidate_vcs_cache()
280 self._remote.invalidate_vcs_cache()
280 return verify
281 return verify
281
282
282 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 if commit_id1 == commit_id2:
284 if commit_id1 == commit_id2:
284 return commit_id1
285 return commit_id1
285
286
286 ancestors = self._remote.revs_from_revspec(
287 ancestors = self._remote.revs_from_revspec(
287 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 other_path=repo2.path)
289 other_path=repo2.path)
289 return repo2[ancestors[0]].raw_id if ancestors else None
290 return repo2[ancestors[0]].raw_id if ancestors else None
290
291
291 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 if commit_id1 == commit_id2:
293 if commit_id1 == commit_id2:
293 commits = []
294 commits = []
294 else:
295 else:
295 if merge:
296 if merge:
296 indexes = self._remote.revs_from_revspec(
297 indexes = self._remote.revs_from_revspec(
297 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 else:
300 else:
300 indexes = self._remote.revs_from_revspec(
301 indexes = self._remote.revs_from_revspec(
301 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 commit_id1, other_path=repo2.path)
303 commit_id1, other_path=repo2.path)
303
304
304 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 for idx in indexes]
306 for idx in indexes]
306
307
307 return commits
308 return commits
308
309
309 @staticmethod
310 @staticmethod
310 def check_url(url, config):
311 def check_url(url, config):
311 """
312 """
312 Function will check given url and try to verify if it's a valid
313 Function will check given url and try to verify if it's a valid
313 link. Sometimes it may happened that mercurial will issue basic
314 link. Sometimes it may happened that mercurial will issue basic
314 auth request that can cause whole API to hang when used from python
315 auth request that can cause whole API to hang when used from python
315 or other external calls.
316 or other external calls.
316
317
317 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 when the return code is non 200
319 when the return code is non 200
319 """
320 """
320 # check first if it's not an local url
321 # check first if it's not an local url
321 if os.path.isdir(url) or url.startswith('file:'):
322 if os.path.isdir(url) or url.startswith('file:'):
322 return True
323 return True
323
324
324 # Request the _remote to verify the url
325 # Request the _remote to verify the url
325 return connection.Hg.check_url(url, config.serialize())
326 return connection.Hg.check_url(url, config.serialize())
326
327
327 @staticmethod
328 @staticmethod
328 def is_valid_repository(path):
329 def is_valid_repository(path):
329 return os.path.isdir(os.path.join(path, '.hg'))
330 return os.path.isdir(os.path.join(path, '.hg'))
330
331
331 def _init_repo(self, create, src_url=None, update_after_clone=False):
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
332 """
333 """
333 Function will check for mercurial repository in given path. If there
334 Function will check for mercurial repository in given path. If there
334 is no repository in that path it will raise an exception unless
335 is no repository in that path it will raise an exception unless
335 `create` parameter is set to True - in that case repository would
336 `create` parameter is set to True - in that case repository would
336 be created.
337 be created.
337
338
338 If `src_url` is given, would try to clone repository from the
339 If `src_url` is given, would try to clone repository from the
339 location at given clone_point. Additionally it'll make update to
340 location at given clone_point. Additionally it'll make update to
340 working copy accordingly to `update_after_clone` flag.
341 working copy accordingly to `do_workspace_checkout` flag.
341 """
342 """
342 if create and os.path.exists(self.path):
343 if create and os.path.exists(self.path):
343 raise RepositoryError(
344 raise RepositoryError(
344 "Cannot create repository at %s, location already exist"
345 "Cannot create repository at %s, location already exist"
345 % self.path)
346 % self.path)
346
347
347 if src_url:
348 if src_url:
348 url = str(self._get_url(src_url))
349 url = str(self._get_url(src_url))
349 MercurialRepository.check_url(url, self.config)
350 MercurialRepository.check_url(url, self.config)
350
351
351 self._remote.clone(url, self.path, update_after_clone)
352 self._remote.clone(url, self.path, do_workspace_checkout)
352
353
353 # Don't try to create if we've already cloned repo
354 # Don't try to create if we've already cloned repo
354 create = False
355 create = False
355
356
356 if create:
357 if create:
357 os.makedirs(self.path, mode=0755)
358 os.makedirs(self.path, mode=0755)
358
359
359 self._remote.localrepository(create)
360 self._remote.localrepository(create)
360
361
361 @LazyProperty
362 @LazyProperty
362 def in_memory_commit(self):
363 def in_memory_commit(self):
363 return MercurialInMemoryCommit(self)
364 return MercurialInMemoryCommit(self)
364
365
365 @LazyProperty
366 @LazyProperty
366 def description(self):
367 def description(self):
367 description = self._remote.get_config_value(
368 description = self._remote.get_config_value(
368 'web', 'description', untrusted=True)
369 'web', 'description', untrusted=True)
369 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
370
371
371 @LazyProperty
372 @LazyProperty
372 def contact(self):
373 def contact(self):
373 contact = (
374 contact = (
374 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("web", "contact") or
375 self._remote.get_config_value("ui", "username"))
376 self._remote.get_config_value("ui", "username"))
376 return safe_unicode(contact or self.DEFAULT_CONTACT)
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
377
378
378 @LazyProperty
379 @LazyProperty
379 def last_change(self):
380 def last_change(self):
380 """
381 """
381 Returns last change made on this repository as
382 Returns last change made on this repository as
382 `datetime.datetime` object.
383 `datetime.datetime` object.
383 """
384 """
384 try:
385 try:
385 return self.get_commit().date
386 return self.get_commit().date
386 except RepositoryError:
387 except RepositoryError:
387 tzoffset = makedate()[1]
388 tzoffset = makedate()[1]
388 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
389
390
390 def _get_fs_mtime(self):
391 def _get_fs_mtime(self):
391 # fallback to filesystem
392 # fallback to filesystem
392 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 st_path = os.path.join(self.path, '.hg', "store")
394 st_path = os.path.join(self.path, '.hg', "store")
394 if os.path.exists(cl_path):
395 if os.path.exists(cl_path):
395 return os.stat(cl_path).st_mtime
396 return os.stat(cl_path).st_mtime
396 else:
397 else:
397 return os.stat(st_path).st_mtime
398 return os.stat(st_path).st_mtime
398
399
399 def _get_url(self, url):
400 def _get_url(self, url):
400 """
401 """
401 Returns normalized url. If schema is not given, would fall
402 Returns normalized url. If schema is not given, would fall
402 to filesystem
403 to filesystem
403 (``file:///``) schema.
404 (``file:///``) schema.
404 """
405 """
405 url = url.encode('utf8')
406 url = url.encode('utf8')
406 if url != 'default' and '://' not in url:
407 if url != 'default' and '://' not in url:
407 url = "file:" + urllib.pathname2url(url)
408 url = "file:" + urllib.pathname2url(url)
408 return url
409 return url
409
410
410 def get_hook_location(self):
411 def get_hook_location(self):
411 """
412 """
412 returns absolute path to location where hooks are stored
413 returns absolute path to location where hooks are stored
413 """
414 """
414 return os.path.join(self.path, '.hg', '.hgrc')
415 return os.path.join(self.path, '.hg', '.hgrc')
415
416
416 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
417 """
418 """
418 Returns ``MercurialCommit`` object representing repository's
419 Returns ``MercurialCommit`` object representing repository's
419 commit at the given `commit_id` or `commit_idx`.
420 commit at the given `commit_id` or `commit_idx`.
420 """
421 """
421 if self.is_empty():
422 if self.is_empty():
422 raise EmptyRepositoryError("There are no commits yet")
423 raise EmptyRepositoryError("There are no commits yet")
423
424
424 if commit_id is not None:
425 if commit_id is not None:
425 self._validate_commit_id(commit_id)
426 self._validate_commit_id(commit_id)
426 try:
427 try:
427 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
428 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
429 except KeyError:
430 except KeyError:
430 pass
431 pass
431 elif commit_idx is not None:
432 elif commit_idx is not None:
432 self._validate_commit_idx(commit_idx)
433 self._validate_commit_idx(commit_idx)
433 try:
434 try:
434 id_ = self.commit_ids[commit_idx]
435 id_ = self.commit_ids[commit_idx]
435 if commit_idx < 0:
436 if commit_idx < 0:
436 commit_idx += len(self.commit_ids)
437 commit_idx += len(self.commit_ids)
437 return MercurialCommit(
438 return MercurialCommit(
438 self, id_, commit_idx, pre_load=pre_load)
439 self, id_, commit_idx, pre_load=pre_load)
439 except IndexError:
440 except IndexError:
440 commit_id = commit_idx
441 commit_id = commit_idx
441 else:
442 else:
442 commit_id = "tip"
443 commit_id = "tip"
443
444
444 if isinstance(commit_id, unicode):
445 if isinstance(commit_id, unicode):
445 commit_id = safe_str(commit_id)
446 commit_id = safe_str(commit_id)
446
447
447 try:
448 try:
448 raw_id, idx = self._remote.lookup(commit_id, both=True)
449 raw_id, idx = self._remote.lookup(commit_id, both=True)
449 except CommitDoesNotExistError:
450 except CommitDoesNotExistError:
450 msg = "Commit %s does not exist for %s" % (
451 msg = "Commit %s does not exist for %s" % (
451 commit_id, self)
452 commit_id, self)
452 raise CommitDoesNotExistError(msg)
453 raise CommitDoesNotExistError(msg)
453
454
454 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
455
456
456 def get_commits(
457 def get_commits(
457 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 branch_name=None, show_hidden=False, pre_load=None):
459 branch_name=None, show_hidden=False, pre_load=None):
459 """
460 """
460 Returns generator of ``MercurialCommit`` objects from start to end
461 Returns generator of ``MercurialCommit`` objects from start to end
461 (both are inclusive)
462 (both are inclusive)
462
463
463 :param start_id: None, str(commit_id)
464 :param start_id: None, str(commit_id)
464 :param end_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
465 :param start_date: if specified, commits with commit date less than
466 :param start_date: if specified, commits with commit date less than
466 ``start_date`` would be filtered out from returned set
467 ``start_date`` would be filtered out from returned set
467 :param end_date: if specified, commits with commit date greater than
468 :param end_date: if specified, commits with commit date greater than
468 ``end_date`` would be filtered out from returned set
469 ``end_date`` would be filtered out from returned set
469 :param branch_name: if specified, commits not reachable from given
470 :param branch_name: if specified, commits not reachable from given
470 branch would be filtered out from returned set
471 branch would be filtered out from returned set
471 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 Mercurial evolve
473 Mercurial evolve
473 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 exist.
475 exist.
475 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 ``end`` could not be found.
477 ``end`` could not be found.
477 """
478 """
478 # actually we should check now if it's not an empty repo
479 # actually we should check now if it's not an empty repo
479 branch_ancestors = False
480 branch_ancestors = False
480 if self.is_empty():
481 if self.is_empty():
481 raise EmptyRepositoryError("There are no commits yet")
482 raise EmptyRepositoryError("There are no commits yet")
482 self._validate_branch_name(branch_name)
483 self._validate_branch_name(branch_name)
483
484
484 if start_id is not None:
485 if start_id is not None:
485 self._validate_commit_id(start_id)
486 self._validate_commit_id(start_id)
486 c_start = self.get_commit(commit_id=start_id)
487 c_start = self.get_commit(commit_id=start_id)
487 start_pos = self._commit_ids[c_start.raw_id]
488 start_pos = self._commit_ids[c_start.raw_id]
488 else:
489 else:
489 start_pos = None
490 start_pos = None
490
491
491 if end_id is not None:
492 if end_id is not None:
492 self._validate_commit_id(end_id)
493 self._validate_commit_id(end_id)
493 c_end = self.get_commit(commit_id=end_id)
494 c_end = self.get_commit(commit_id=end_id)
494 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 else:
496 else:
496 end_pos = None
497 end_pos = None
497
498
498 if None not in [start_id, end_id] and start_pos > end_pos:
499 if None not in [start_id, end_id] and start_pos > end_pos:
499 raise RepositoryError(
500 raise RepositoryError(
500 "Start commit '%s' cannot be after end commit '%s'" %
501 "Start commit '%s' cannot be after end commit '%s'" %
501 (start_id, end_id))
502 (start_id, end_id))
502
503
503 if end_pos is not None:
504 if end_pos is not None:
504 end_pos += 1
505 end_pos += 1
505
506
506 commit_filter = []
507 commit_filter = []
507
508
508 if branch_name and not branch_ancestors:
509 if branch_name and not branch_ancestors:
509 commit_filter.append('branch("%s")' % (branch_name,))
510 commit_filter.append('branch("%s")' % (branch_name,))
510 elif branch_name and branch_ancestors:
511 elif branch_name and branch_ancestors:
511 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512
513
513 if start_date and not end_date:
514 if start_date and not end_date:
514 commit_filter.append('date(">%s")' % (start_date,))
515 commit_filter.append('date(">%s")' % (start_date,))
515 if end_date and not start_date:
516 if end_date and not start_date:
516 commit_filter.append('date("<%s")' % (end_date,))
517 commit_filter.append('date("<%s")' % (end_date,))
517 if start_date and end_date:
518 if start_date and end_date:
518 commit_filter.append(
519 commit_filter.append(
519 'date(">%s") and date("<%s")' % (start_date, end_date))
520 'date(">%s") and date("<%s")' % (start_date, end_date))
520
521
521 if not show_hidden:
522 if not show_hidden:
522 commit_filter.append('not obsolete()')
523 commit_filter.append('not obsolete()')
523 commit_filter.append('not hidden()')
524 commit_filter.append('not hidden()')
524
525
525 # TODO: johbo: Figure out a simpler way for this solution
526 # TODO: johbo: Figure out a simpler way for this solution
526 collection_generator = CollectionGenerator
527 collection_generator = CollectionGenerator
527 if commit_filter:
528 if commit_filter:
528 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 revisions = self._remote.rev_range([commit_filter])
530 revisions = self._remote.rev_range([commit_filter])
530 collection_generator = MercurialIndexBasedCollectionGenerator
531 collection_generator = MercurialIndexBasedCollectionGenerator
531 else:
532 else:
532 revisions = self.commit_ids
533 revisions = self.commit_ids
533
534
534 if start_pos or end_pos:
535 if start_pos or end_pos:
535 revisions = revisions[start_pos:end_pos]
536 revisions = revisions[start_pos:end_pos]
536
537
537 return collection_generator(self, revisions, pre_load=pre_load)
538 return collection_generator(self, revisions, pre_load=pre_load)
538
539
539 def pull(self, url, commit_ids=None):
540 def pull(self, url, commit_ids=None):
540 """
541 """
541 Tries to pull changes from external location.
542 Pull changes from external location.
542
543
543 :param commit_ids: Optional. Can be set to a list of commit ids
544 :param commit_ids: Optional. Can be set to a list of commit ids
544 which shall be pulled from the other repository.
545 which shall be pulled from the other repository.
545 """
546 """
546 url = self._get_url(url)
547 url = self._get_url(url)
547 self._remote.pull(url, commit_ids=commit_ids)
548 self._remote.pull(url, commit_ids=commit_ids)
548 self._remote.invalidate_vcs_cache()
549 self._remote.invalidate_vcs_cache()
549
550
551 def fetch(self, url, commit_ids=None):
552 """
553 Backward compatibility with GIT fetch==pull
554 """
555 return self.pull(url, commit_ids=commit_ids)
556
550 def push(self, url):
557 def push(self, url):
551 url = self._get_url(url)
558 url = self._get_url(url)
552 self._remote.sync_push(url)
559 self._remote.sync_push(url)
553
560
554 def _local_clone(self, clone_path):
561 def _local_clone(self, clone_path):
555 """
562 """
556 Create a local clone of the current repo.
563 Create a local clone of the current repo.
557 """
564 """
558 self._remote.clone(self.path, clone_path, update_after_clone=True,
565 self._remote.clone(self.path, clone_path, update_after_clone=True,
559 hooks=False)
566 hooks=False)
560
567
561 def _update(self, revision, clean=False):
568 def _update(self, revision, clean=False):
562 """
569 """
563 Update the working copy to the specified revision.
570 Update the working copy to the specified revision.
564 """
571 """
565 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
566 self._remote.update(revision, clean=clean)
573 self._remote.update(revision, clean=clean)
567
574
568 def _identify(self):
575 def _identify(self):
569 """
576 """
570 Return the current state of the working directory.
577 Return the current state of the working directory.
571 """
578 """
572 return self._remote.identify().strip().rstrip('+')
579 return self._remote.identify().strip().rstrip('+')
573
580
574 def _heads(self, branch=None):
581 def _heads(self, branch=None):
575 """
582 """
576 Return the commit ids of the repository heads.
583 Return the commit ids of the repository heads.
577 """
584 """
578 return self._remote.heads(branch=branch).strip().split(' ')
585 return self._remote.heads(branch=branch).strip().split(' ')
579
586
580 def _ancestor(self, revision1, revision2):
587 def _ancestor(self, revision1, revision2):
581 """
588 """
582 Return the common ancestor of the two revisions.
589 Return the common ancestor of the two revisions.
583 """
590 """
584 return self._remote.ancestor(revision1, revision2)
591 return self._remote.ancestor(revision1, revision2)
585
592
586 def _local_push(
593 def _local_push(
587 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
588 enable_hooks=False):
595 enable_hooks=False):
589 """
596 """
590 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
591
598
592 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
593 """
600 """
594 self._remote.push(
601 self._remote.push(
595 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
596 push_branches=push_branches)
603 push_branches=push_branches)
597
604
598 def _local_merge(self, target_ref, merge_message, user_name, user_email,
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
599 source_ref, use_rebase=False, dry_run=False):
606 source_ref, use_rebase=False, dry_run=False):
600 """
607 """
601 Merge the given source_revision into the checked out revision.
608 Merge the given source_revision into the checked out revision.
602
609
603 Returns the commit id of the merge and a boolean indicating if the
610 Returns the commit id of the merge and a boolean indicating if the
604 commit needs to be pushed.
611 commit needs to be pushed.
605 """
612 """
606 self._update(target_ref.commit_id)
613 self._update(target_ref.commit_id)
607
614
608 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
609 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
610
617
611 if ancestor == source_ref.commit_id:
618 if ancestor == source_ref.commit_id:
612 # Nothing to do, the changes were already integrated
619 # Nothing to do, the changes were already integrated
613 return target_ref.commit_id, False
620 return target_ref.commit_id, False
614
621
615 elif ancestor == target_ref.commit_id and is_the_same_branch:
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
616 # In this case we should force a commit message
623 # In this case we should force a commit message
617 return source_ref.commit_id, True
624 return source_ref.commit_id, True
618
625
619 if use_rebase:
626 if use_rebase:
620 try:
627 try:
621 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
622 target_ref.commit_id)
629 target_ref.commit_id)
623 self.bookmark(bookmark_name, revision=source_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
624 self._remote.rebase(
631 self._remote.rebase(
625 source=source_ref.commit_id, dest=target_ref.commit_id)
632 source=source_ref.commit_id, dest=target_ref.commit_id)
626 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
627 self._update(bookmark_name)
634 self._update(bookmark_name)
628 return self._identify(), True
635 return self._identify(), True
629 except RepositoryError:
636 except RepositoryError:
630 # The rebase-abort may raise another exception which 'hides'
637 # The rebase-abort may raise another exception which 'hides'
631 # the original one, therefore we log it here.
638 # the original one, therefore we log it here.
632 log.exception('Error while rebasing shadow repo during merge.')
639 log.exception('Error while rebasing shadow repo during merge.')
633
640
634 # Cleanup any rebase leftovers
641 # Cleanup any rebase leftovers
635 self._remote.invalidate_vcs_cache()
642 self._remote.invalidate_vcs_cache()
636 self._remote.rebase(abort=True)
643 self._remote.rebase(abort=True)
637 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
638 self._remote.update(clean=True)
645 self._remote.update(clean=True)
639 raise
646 raise
640 else:
647 else:
641 try:
648 try:
642 self._remote.merge(source_ref.commit_id)
649 self._remote.merge(source_ref.commit_id)
643 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
644 self._remote.commit(
651 self._remote.commit(
645 message=safe_str(merge_message),
652 message=safe_str(merge_message),
646 username=safe_str('%s <%s>' % (user_name, user_email)))
653 username=safe_str('%s <%s>' % (user_name, user_email)))
647 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
648 return self._identify(), True
655 return self._identify(), True
649 except RepositoryError:
656 except RepositoryError:
650 # Cleanup any merge leftovers
657 # Cleanup any merge leftovers
651 self._remote.update(clean=True)
658 self._remote.update(clean=True)
652 raise
659 raise
653
660
654 def _local_close(self, target_ref, user_name, user_email,
661 def _local_close(self, target_ref, user_name, user_email,
655 source_ref, close_message=''):
662 source_ref, close_message=''):
656 """
663 """
657 Close the branch of the given source_revision
664 Close the branch of the given source_revision
658
665
659 Returns the commit id of the close and a boolean indicating if the
666 Returns the commit id of the close and a boolean indicating if the
660 commit needs to be pushed.
667 commit needs to be pushed.
661 """
668 """
662 self._update(source_ref.commit_id)
669 self._update(source_ref.commit_id)
663 message = close_message or "Closing branch: `{}`".format(source_ref.name)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
664 try:
671 try:
665 self._remote.commit(
672 self._remote.commit(
666 message=safe_str(message),
673 message=safe_str(message),
667 username=safe_str('%s <%s>' % (user_name, user_email)),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
668 close_branch=True)
675 close_branch=True)
669 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
670 return self._identify(), True
677 return self._identify(), True
671 except RepositoryError:
678 except RepositoryError:
672 # Cleanup any commit leftovers
679 # Cleanup any commit leftovers
673 self._remote.update(clean=True)
680 self._remote.update(clean=True)
674 raise
681 raise
675
682
676 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
677 return (
684 return (
678 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
679 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
680
687
681 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
682 if ref.type == 'branch':
689 if ref.type == 'branch':
683 return ref.name
690 return ref.name
684 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
685
692
686 def _maybe_prepare_merge_workspace(
693 def _maybe_prepare_merge_workspace(
687 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
688 shadow_repository_path = self._get_shadow_repository_path(
695 shadow_repository_path = self._get_shadow_repository_path(
689 repo_id, workspace_id)
696 repo_id, workspace_id)
690 if not os.path.exists(shadow_repository_path):
697 if not os.path.exists(shadow_repository_path):
691 self._local_clone(shadow_repository_path)
698 self._local_clone(shadow_repository_path)
692 log.debug(
699 log.debug(
693 'Prepared shadow repository in %s', shadow_repository_path)
700 'Prepared shadow repository in %s', shadow_repository_path)
694
701
695 return shadow_repository_path
702 return shadow_repository_path
696
703
697 def _merge_repo(self, repo_id, workspace_id, target_ref,
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
698 source_repo, source_ref, merge_message,
705 source_repo, source_ref, merge_message,
699 merger_name, merger_email, dry_run=False,
706 merger_name, merger_email, dry_run=False,
700 use_rebase=False, close_branch=False):
707 use_rebase=False, close_branch=False):
701
708
702 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
703 'rebase' if use_rebase else 'merge', dry_run)
710 'rebase' if use_rebase else 'merge', dry_run)
704 if target_ref.commit_id not in self._heads():
711 if target_ref.commit_id not in self._heads():
705 return MergeResponse(
712 return MergeResponse(
706 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
707
714
708 try:
715 try:
709 if (target_ref.type == 'branch' and
716 if (target_ref.type == 'branch' and
710 len(self._heads(target_ref.name)) != 1):
717 len(self._heads(target_ref.name)) != 1):
711 return MergeResponse(
718 return MergeResponse(
712 False, False, None,
719 False, False, None,
713 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
720 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
714 except CommitDoesNotExistError:
721 except CommitDoesNotExistError:
715 log.exception('Failure when looking up branch heads on hg target')
722 log.exception('Failure when looking up branch heads on hg target')
716 return MergeResponse(
723 return MergeResponse(
717 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
724 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
718
725
719 shadow_repository_path = self._maybe_prepare_merge_workspace(
726 shadow_repository_path = self._maybe_prepare_merge_workspace(
720 repo_id, workspace_id, target_ref, source_ref)
727 repo_id, workspace_id, target_ref, source_ref)
721 shadow_repo = self._get_shadow_instance(shadow_repository_path)
728 shadow_repo = self._get_shadow_instance(shadow_repository_path)
722
729
723 log.debug('Pulling in target reference %s', target_ref)
730 log.debug('Pulling in target reference %s', target_ref)
724 self._validate_pull_reference(target_ref)
731 self._validate_pull_reference(target_ref)
725 shadow_repo._local_pull(self.path, target_ref)
732 shadow_repo._local_pull(self.path, target_ref)
726 try:
733 try:
727 log.debug('Pulling in source reference %s', source_ref)
734 log.debug('Pulling in source reference %s', source_ref)
728 source_repo._validate_pull_reference(source_ref)
735 source_repo._validate_pull_reference(source_ref)
729 shadow_repo._local_pull(source_repo.path, source_ref)
736 shadow_repo._local_pull(source_repo.path, source_ref)
730 except CommitDoesNotExistError:
737 except CommitDoesNotExistError:
731 log.exception('Failure when doing local pull on hg shadow repo')
738 log.exception('Failure when doing local pull on hg shadow repo')
732 return MergeResponse(
739 return MergeResponse(
733 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
740 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
734
741
735 merge_ref = None
742 merge_ref = None
736 merge_commit_id = None
743 merge_commit_id = None
737 close_commit_id = None
744 close_commit_id = None
738 merge_failure_reason = MergeFailureReason.NONE
745 merge_failure_reason = MergeFailureReason.NONE
739
746
740 # enforce that close branch should be used only in case we source from
747 # enforce that close branch should be used only in case we source from
741 # an actual Branch
748 # an actual Branch
742 close_branch = close_branch and source_ref.type == 'branch'
749 close_branch = close_branch and source_ref.type == 'branch'
743
750
744 # don't allow to close branch if source and target are the same
751 # don't allow to close branch if source and target are the same
745 close_branch = close_branch and source_ref.name != target_ref.name
752 close_branch = close_branch and source_ref.name != target_ref.name
746
753
747 needs_push_on_close = False
754 needs_push_on_close = False
748 if close_branch and not use_rebase and not dry_run:
755 if close_branch and not use_rebase and not dry_run:
749 try:
756 try:
750 close_commit_id, needs_push_on_close = shadow_repo._local_close(
757 close_commit_id, needs_push_on_close = shadow_repo._local_close(
751 target_ref, merger_name, merger_email, source_ref)
758 target_ref, merger_name, merger_email, source_ref)
752 merge_possible = True
759 merge_possible = True
753 except RepositoryError:
760 except RepositoryError:
754 log.exception(
761 log.exception(
755 'Failure when doing close branch on hg shadow repo')
762 'Failure when doing close branch on hg shadow repo')
756 merge_possible = False
763 merge_possible = False
757 merge_failure_reason = MergeFailureReason.MERGE_FAILED
764 merge_failure_reason = MergeFailureReason.MERGE_FAILED
758 else:
765 else:
759 merge_possible = True
766 merge_possible = True
760
767
761 needs_push = False
768 needs_push = False
762 if merge_possible:
769 if merge_possible:
763 try:
770 try:
764 merge_commit_id, needs_push = shadow_repo._local_merge(
771 merge_commit_id, needs_push = shadow_repo._local_merge(
765 target_ref, merge_message, merger_name, merger_email,
772 target_ref, merge_message, merger_name, merger_email,
766 source_ref, use_rebase=use_rebase, dry_run=dry_run)
773 source_ref, use_rebase=use_rebase, dry_run=dry_run)
767 merge_possible = True
774 merge_possible = True
768
775
769 # read the state of the close action, if it
776 # read the state of the close action, if it
770 # maybe required a push
777 # maybe required a push
771 needs_push = needs_push or needs_push_on_close
778 needs_push = needs_push or needs_push_on_close
772
779
773 # Set a bookmark pointing to the merge commit. This bookmark
780 # Set a bookmark pointing to the merge commit. This bookmark
774 # may be used to easily identify the last successful merge
781 # may be used to easily identify the last successful merge
775 # commit in the shadow repository.
782 # commit in the shadow repository.
776 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
783 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
777 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
784 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
778 except SubrepoMergeError:
785 except SubrepoMergeError:
779 log.exception(
786 log.exception(
780 'Subrepo merge error during local merge on hg shadow repo.')
787 'Subrepo merge error during local merge on hg shadow repo.')
781 merge_possible = False
788 merge_possible = False
782 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
789 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
783 needs_push = False
790 needs_push = False
784 except RepositoryError:
791 except RepositoryError:
785 log.exception('Failure when doing local merge on hg shadow repo')
792 log.exception('Failure when doing local merge on hg shadow repo')
786 merge_possible = False
793 merge_possible = False
787 merge_failure_reason = MergeFailureReason.MERGE_FAILED
794 merge_failure_reason = MergeFailureReason.MERGE_FAILED
788 needs_push = False
795 needs_push = False
789
796
790 if merge_possible and not dry_run:
797 if merge_possible and not dry_run:
791 if needs_push:
798 if needs_push:
792 # In case the target is a bookmark, update it, so after pushing
799 # In case the target is a bookmark, update it, so after pushing
793 # the bookmarks is also updated in the target.
800 # the bookmarks is also updated in the target.
794 if target_ref.type == 'book':
801 if target_ref.type == 'book':
795 shadow_repo.bookmark(
802 shadow_repo.bookmark(
796 target_ref.name, revision=merge_commit_id)
803 target_ref.name, revision=merge_commit_id)
797 try:
804 try:
798 shadow_repo_with_hooks = self._get_shadow_instance(
805 shadow_repo_with_hooks = self._get_shadow_instance(
799 shadow_repository_path,
806 shadow_repository_path,
800 enable_hooks=True)
807 enable_hooks=True)
801 # This is the actual merge action, we push from shadow
808 # This is the actual merge action, we push from shadow
802 # into origin.
809 # into origin.
803 # Note: the push_branches option will push any new branch
810 # Note: the push_branches option will push any new branch
804 # defined in the source repository to the target. This may
811 # defined in the source repository to the target. This may
805 # be dangerous as branches are permanent in Mercurial.
812 # be dangerous as branches are permanent in Mercurial.
806 # This feature was requested in issue #441.
813 # This feature was requested in issue #441.
807 shadow_repo_with_hooks._local_push(
814 shadow_repo_with_hooks._local_push(
808 merge_commit_id, self.path, push_branches=True,
815 merge_commit_id, self.path, push_branches=True,
809 enable_hooks=True)
816 enable_hooks=True)
810
817
811 # maybe we also need to push the close_commit_id
818 # maybe we also need to push the close_commit_id
812 if close_commit_id:
819 if close_commit_id:
813 shadow_repo_with_hooks._local_push(
820 shadow_repo_with_hooks._local_push(
814 close_commit_id, self.path, push_branches=True,
821 close_commit_id, self.path, push_branches=True,
815 enable_hooks=True)
822 enable_hooks=True)
816 merge_succeeded = True
823 merge_succeeded = True
817 except RepositoryError:
824 except RepositoryError:
818 log.exception(
825 log.exception(
819 'Failure when doing local push from the shadow '
826 'Failure when doing local push from the shadow '
820 'repository to the target repository.')
827 'repository to the target repository.')
821 merge_succeeded = False
828 merge_succeeded = False
822 merge_failure_reason = MergeFailureReason.PUSH_FAILED
829 merge_failure_reason = MergeFailureReason.PUSH_FAILED
823 else:
830 else:
824 merge_succeeded = True
831 merge_succeeded = True
825 else:
832 else:
826 merge_succeeded = False
833 merge_succeeded = False
827
834
828 return MergeResponse(
835 return MergeResponse(
829 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
836 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
830
837
831 def _get_shadow_instance(
838 def _get_shadow_instance(
832 self, shadow_repository_path, enable_hooks=False):
839 self, shadow_repository_path, enable_hooks=False):
833 config = self.config.copy()
840 config = self.config.copy()
834 if not enable_hooks:
841 if not enable_hooks:
835 config.clear_section('hooks')
842 config.clear_section('hooks')
836 return MercurialRepository(shadow_repository_path, config)
843 return MercurialRepository(shadow_repository_path, config)
837
844
838 def _validate_pull_reference(self, reference):
845 def _validate_pull_reference(self, reference):
839 if not (reference.name in self.bookmarks or
846 if not (reference.name in self.bookmarks or
840 reference.name in self.branches or
847 reference.name in self.branches or
841 self.get_commit(reference.commit_id)):
848 self.get_commit(reference.commit_id)):
842 raise CommitDoesNotExistError(
849 raise CommitDoesNotExistError(
843 'Unknown branch, bookmark or commit id')
850 'Unknown branch, bookmark or commit id')
844
851
845 def _local_pull(self, repository_path, reference):
852 def _local_pull(self, repository_path, reference):
846 """
853 """
847 Fetch a branch, bookmark or commit from a local repository.
854 Fetch a branch, bookmark or commit from a local repository.
848 """
855 """
849 repository_path = os.path.abspath(repository_path)
856 repository_path = os.path.abspath(repository_path)
850 if repository_path == self.path:
857 if repository_path == self.path:
851 raise ValueError('Cannot pull from the same repository')
858 raise ValueError('Cannot pull from the same repository')
852
859
853 reference_type_to_option_name = {
860 reference_type_to_option_name = {
854 'book': 'bookmark',
861 'book': 'bookmark',
855 'branch': 'branch',
862 'branch': 'branch',
856 }
863 }
857 option_name = reference_type_to_option_name.get(
864 option_name = reference_type_to_option_name.get(
858 reference.type, 'revision')
865 reference.type, 'revision')
859
866
860 if option_name == 'revision':
867 if option_name == 'revision':
861 ref = reference.commit_id
868 ref = reference.commit_id
862 else:
869 else:
863 ref = reference.name
870 ref = reference.name
864
871
865 options = {option_name: [ref]}
872 options = {option_name: [ref]}
866 self._remote.pull_cmd(repository_path, hooks=False, **options)
873 self._remote.pull_cmd(repository_path, hooks=False, **options)
867 self._remote.invalidate_vcs_cache()
874 self._remote.invalidate_vcs_cache()
868
875
869 def bookmark(self, bookmark, revision=None):
876 def bookmark(self, bookmark, revision=None):
870 if isinstance(bookmark, unicode):
877 if isinstance(bookmark, unicode):
871 bookmark = safe_str(bookmark)
878 bookmark = safe_str(bookmark)
872 self._remote.bookmark(bookmark, revision=revision)
879 self._remote.bookmark(bookmark, revision=revision)
873 self._remote.invalidate_vcs_cache()
880 self._remote.invalidate_vcs_cache()
874
881
875 def get_path_permissions(self, username):
882 def get_path_permissions(self, username):
876 hgacl_file = os.path.join(self.path, '.hg/hgacl')
883 hgacl_file = os.path.join(self.path, '.hg/hgacl')
877
884
878 def read_patterns(suffix):
885 def read_patterns(suffix):
879 svalue = None
886 svalue = None
880 try:
887 try:
881 svalue = hgacl.get('narrowhgacl', username + suffix)
888 svalue = hgacl.get('narrowhgacl', username + suffix)
882 except configparser.NoOptionError:
889 except configparser.NoOptionError:
883 try:
890 try:
884 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
891 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
885 except configparser.NoOptionError:
892 except configparser.NoOptionError:
886 pass
893 pass
887 if not svalue:
894 if not svalue:
888 return None
895 return None
889 result = ['/']
896 result = ['/']
890 for pattern in svalue.split():
897 for pattern in svalue.split():
891 result.append(pattern)
898 result.append(pattern)
892 if '*' not in pattern and '?' not in pattern:
899 if '*' not in pattern and '?' not in pattern:
893 result.append(pattern + '/*')
900 result.append(pattern + '/*')
894 return result
901 return result
895
902
896 if os.path.exists(hgacl_file):
903 if os.path.exists(hgacl_file):
897 try:
904 try:
898 hgacl = configparser.RawConfigParser()
905 hgacl = configparser.RawConfigParser()
899 hgacl.read(hgacl_file)
906 hgacl.read(hgacl_file)
900
907
901 includes = read_patterns('.includes')
908 includes = read_patterns('.includes')
902 excludes = read_patterns('.excludes')
909 excludes = read_patterns('.excludes')
903 return BasePathPermissionChecker.create_from_patterns(
910 return BasePathPermissionChecker.create_from_patterns(
904 includes, excludes)
911 includes, excludes)
905 except BaseException as e:
912 except BaseException as e:
906 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
913 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
907 hgacl_file, self.name, e)
914 hgacl_file, self.name, e)
908 raise exceptions.RepositoryRequirementError(msg)
915 raise exceptions.RepositoryRequirementError(msg)
909 else:
916 else:
910 return None
917 return None
911
918
912
919
913 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
920 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
914
921
915 def _commit_factory(self, commit_id):
922 def _commit_factory(self, commit_id):
916 return self.repo.get_commit(
923 return self.repo.get_commit(
917 commit_idx=commit_id, pre_load=self.pre_load)
924 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,343 +1,343 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends import base
35 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends.svn.commit import (
36 from rhodecode.lib.vcs.backends.svn.commit import (
37 SubversionCommit, _date_from_svn_properties)
37 SubversionCommit, _date_from_svn_properties)
38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 VCSError, NodeDoesNotExistError)
43 VCSError, NodeDoesNotExistError)
44
44
45
45
46 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
47
47
48
48
49 class SubversionRepository(base.BaseRepository):
49 class SubversionRepository(base.BaseRepository):
50 """
50 """
51 Subversion backend implementation
51 Subversion backend implementation
52
52
53 .. important::
53 .. important::
54
54
55 It is very important to distinguish the commit index and the commit id
55 It is very important to distinguish the commit index and the commit id
56 which is assigned by Subversion. The first one is always handled as an
56 which is assigned by Subversion. The first one is always handled as an
57 `int` by this implementation. The commit id assigned by Subversion on
57 `int` by this implementation. The commit id assigned by Subversion on
58 the other side will always be a `str`.
58 the other side will always be a `str`.
59
59
60 There is a specific trap since the first commit will have the index
60 There is a specific trap since the first commit will have the index
61 ``0`` but the svn id will be ``"1"``.
61 ``0`` but the svn id will be ``"1"``.
62
62
63 """
63 """
64
64
65 # Note: Subversion does not really have a default branch name.
65 # Note: Subversion does not really have a default branch name.
66 DEFAULT_BRANCH_NAME = None
66 DEFAULT_BRANCH_NAME = None
67
67
68 contact = base.BaseRepository.DEFAULT_CONTACT
68 contact = base.BaseRepository.DEFAULT_CONTACT
69 description = base.BaseRepository.DEFAULT_DESCRIPTION
69 description = base.BaseRepository.DEFAULT_DESCRIPTION
70
70
71 def __init__(self, repo_path, config=None, create=False, src_url=None,
71 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
72 **kwargs):
72 **kwargs):
73 self.path = safe_str(os.path.abspath(repo_path))
73 self.path = safe_str(os.path.abspath(repo_path))
74 self.config = config if config else self.get_default_config()
74 self.config = config if config else self.get_default_config()
75
75
76 self._init_repo(create, src_url)
76 self._init_repo(create, src_url)
77
77
78 @LazyProperty
78 @LazyProperty
79 def _remote(self):
79 def _remote(self):
80 return connection.Svn(self.path, self.config)
80 return connection.Svn(self.path, self.config)
81
81
82 def _init_repo(self, create, src_url):
82 def _init_repo(self, create, src_url):
83 if create and os.path.exists(self.path):
83 if create and os.path.exists(self.path):
84 raise RepositoryError(
84 raise RepositoryError(
85 "Cannot create repository at %s, location already exist"
85 "Cannot create repository at %s, location already exist"
86 % self.path)
86 % self.path)
87
87
88 if create:
88 if create:
89 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
89 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
90 if src_url:
90 if src_url:
91 src_url = _sanitize_url(src_url)
91 src_url = _sanitize_url(src_url)
92 self._remote.import_remote_repository(src_url)
92 self._remote.import_remote_repository(src_url)
93 else:
93 else:
94 self._check_path()
94 self._check_path()
95
95
96 @LazyProperty
96 @LazyProperty
97 def commit_ids(self):
97 def commit_ids(self):
98 head = self._remote.lookup(None)
98 head = self._remote.lookup(None)
99 return [str(r) for r in xrange(1, head + 1)]
99 return [str(r) for r in xrange(1, head + 1)]
100
100
101 @LazyProperty
101 @LazyProperty
102 def branches(self):
102 def branches(self):
103 return self._tags_or_branches('vcs_svn_branch')
103 return self._tags_or_branches('vcs_svn_branch')
104
104
105 @LazyProperty
105 @LazyProperty
106 def branches_closed(self):
106 def branches_closed(self):
107 return {}
107 return {}
108
108
109 @LazyProperty
109 @LazyProperty
110 def bookmarks(self):
110 def bookmarks(self):
111 return {}
111 return {}
112
112
113 @LazyProperty
113 @LazyProperty
114 def branches_all(self):
114 def branches_all(self):
115 # TODO: johbo: Implement proper branch support
115 # TODO: johbo: Implement proper branch support
116 all_branches = {}
116 all_branches = {}
117 all_branches.update(self.branches)
117 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
119 return all_branches
119 return all_branches
120
120
121 @LazyProperty
121 @LazyProperty
122 def tags(self):
122 def tags(self):
123 return self._tags_or_branches('vcs_svn_tag')
123 return self._tags_or_branches('vcs_svn_tag')
124
124
125 def _tags_or_branches(self, config_section):
125 def _tags_or_branches(self, config_section):
126 found_items = {}
126 found_items = {}
127
127
128 if self.is_empty():
128 if self.is_empty():
129 return {}
129 return {}
130
130
131 for pattern in self._patterns_from_section(config_section):
131 for pattern in self._patterns_from_section(config_section):
132 pattern = vcspath.sanitize(pattern)
132 pattern = vcspath.sanitize(pattern)
133 tip = self.get_commit()
133 tip = self.get_commit()
134 try:
134 try:
135 if pattern.endswith('*'):
135 if pattern.endswith('*'):
136 basedir = tip.get_node(vcspath.dirname(pattern))
136 basedir = tip.get_node(vcspath.dirname(pattern))
137 directories = basedir.dirs
137 directories = basedir.dirs
138 else:
138 else:
139 directories = (tip.get_node(pattern), )
139 directories = (tip.get_node(pattern), )
140 except NodeDoesNotExistError:
140 except NodeDoesNotExistError:
141 continue
141 continue
142 found_items.update(
142 found_items.update(
143 (safe_unicode(n.path),
143 (safe_unicode(n.path),
144 self.commit_ids[-1])
144 self.commit_ids[-1])
145 for n in directories)
145 for n in directories)
146
146
147 def get_name(item):
147 def get_name(item):
148 return item[0]
148 return item[0]
149
149
150 return OrderedDict(sorted(found_items.items(), key=get_name))
150 return OrderedDict(sorted(found_items.items(), key=get_name))
151
151
152 def _patterns_from_section(self, section):
152 def _patterns_from_section(self, section):
153 return (pattern for key, pattern in self.config.items(section))
153 return (pattern for key, pattern in self.config.items(section))
154
154
155 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
155 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
156 if self != repo2:
156 if self != repo2:
157 raise ValueError(
157 raise ValueError(
158 "Subversion does not support getting common ancestor of"
158 "Subversion does not support getting common ancestor of"
159 " different repositories.")
159 " different repositories.")
160
160
161 if int(commit_id1) < int(commit_id2):
161 if int(commit_id1) < int(commit_id2):
162 return commit_id1
162 return commit_id1
163 return commit_id2
163 return commit_id2
164
164
165 def verify(self):
165 def verify(self):
166 verify = self._remote.verify()
166 verify = self._remote.verify()
167
167
168 self._remote.invalidate_vcs_cache()
168 self._remote.invalidate_vcs_cache()
169 return verify
169 return verify
170
170
171 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
171 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
172 # TODO: johbo: Implement better comparison, this is a very naive
172 # TODO: johbo: Implement better comparison, this is a very naive
173 # version which does not allow to compare branches, tags or folders
173 # version which does not allow to compare branches, tags or folders
174 # at all.
174 # at all.
175 if repo2 != self:
175 if repo2 != self:
176 raise ValueError(
176 raise ValueError(
177 "Subversion does not support comparison of of different "
177 "Subversion does not support comparison of of different "
178 "repositories.")
178 "repositories.")
179
179
180 if commit_id1 == commit_id2:
180 if commit_id1 == commit_id2:
181 return []
181 return []
182
182
183 commit_idx1 = self._get_commit_idx(commit_id1)
183 commit_idx1 = self._get_commit_idx(commit_id1)
184 commit_idx2 = self._get_commit_idx(commit_id2)
184 commit_idx2 = self._get_commit_idx(commit_id2)
185
185
186 commits = [
186 commits = [
187 self.get_commit(commit_idx=idx)
187 self.get_commit(commit_idx=idx)
188 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
188 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
189
189
190 return commits
190 return commits
191
191
192 def _get_commit_idx(self, commit_id):
192 def _get_commit_idx(self, commit_id):
193 try:
193 try:
194 svn_rev = int(commit_id)
194 svn_rev = int(commit_id)
195 except:
195 except:
196 # TODO: johbo: this might be only one case, HEAD, check this
196 # TODO: johbo: this might be only one case, HEAD, check this
197 svn_rev = self._remote.lookup(commit_id)
197 svn_rev = self._remote.lookup(commit_id)
198 commit_idx = svn_rev - 1
198 commit_idx = svn_rev - 1
199 if commit_idx >= len(self.commit_ids):
199 if commit_idx >= len(self.commit_ids):
200 raise CommitDoesNotExistError(
200 raise CommitDoesNotExistError(
201 "Commit at index %s does not exist." % (commit_idx, ))
201 "Commit at index %s does not exist." % (commit_idx, ))
202 return commit_idx
202 return commit_idx
203
203
204 @staticmethod
204 @staticmethod
205 def check_url(url, config):
205 def check_url(url, config):
206 """
206 """
207 Check if `url` is a valid source to import a Subversion repository.
207 Check if `url` is a valid source to import a Subversion repository.
208 """
208 """
209 # convert to URL if it's a local directory
209 # convert to URL if it's a local directory
210 if os.path.isdir(url):
210 if os.path.isdir(url):
211 url = 'file://' + urllib.pathname2url(url)
211 url = 'file://' + urllib.pathname2url(url)
212 return connection.Svn.check_url(url, config.serialize())
212 return connection.Svn.check_url(url, config.serialize())
213
213
214 @staticmethod
214 @staticmethod
215 def is_valid_repository(path):
215 def is_valid_repository(path):
216 try:
216 try:
217 SubversionRepository(path)
217 SubversionRepository(path)
218 return True
218 return True
219 except VCSError:
219 except VCSError:
220 pass
220 pass
221 return False
221 return False
222
222
    def _check_path(self):
        """
        Validate that ``self.path`` exists on disk and holds a Subversion
        repository, raising ``VCSError`` otherwise.
        """
        if not os.path.exists(self.path):
            raise VCSError('Path "%s" does not exist!' % (self.path, ))
        # Existence alone is not enough -- ask the backend whether the
        # directory actually contains a repository.
        if not self._remote.is_path_valid_repository(self.path):
            raise VCSError(
                'Path "%s" does not contain a Subversion repository' %
                (self.path, ))
230
230
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        # len(commit_ids) is therefore the newest 1-based svn revision.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        # revision properties carry the commit date; conversion handled by
        # the module-level helper.
        return _date_from_svn_properties(properties)
242
242
    @LazyProperty
    def in_memory_commit(self):
        # One in-memory commit object per repository instance; cached on
        # first access by LazyProperty.
        return SubversionInMemoryCommit(self)
246
246
247 def get_hook_location(self):
247 def get_hook_location(self):
248 """
248 """
249 returns absolute path to location where hooks are stored
249 returns absolute path to location where hooks are stored
250 """
250 """
251 return os.path.join(self.path, 'hooks')
251 return os.path.join(self.path, 'hooks')
252
252
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Return a single ``SubversionCommit``, addressed by id or by index.

        :param commit_id: svn revision as a string, or a symbolic name
            understood by ``_sanitize_commit_id`` (e.g. HEAD, tip).
        :param commit_idx: zero-based index into ``self.commit_ids``.
        :param pre_load: accepted for API compatibility; not used here.
        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the index is out of range.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError

        # Normalize symbolic names (HEAD/tip/None) to a revision string.
        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit
268
268
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Return a lazy ``CollectionGenerator`` over commits, optionally
        bounded by ids, dates and/or restricted to one branch path.

        :param start_id/end_id: inclusive revision-id bounds.
        :param start_date/end_date: date bounds, resolved server-side.
        :param branch_name: svn path whose node history restricts the set.
        :param show_hidden: accepted for API compatibility; unused here.
        :raises EmptyRepositoryError: when there are no commits at all.
        :raises RepositoryError: when start_id is newer than end_id.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # Translate the id bounds into slice positions over commit_ids.
        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))
        # Make the end bound inclusive for the slice below.
        if end_pos is not None:
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            # backend returns 1-based revisions; convert to slice positions
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            svn_rev = long(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            # node_history yields newest-first; normalize to oldest-first
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
313
313
314 def _sanitize_commit_id(self, commit_id):
314 def _sanitize_commit_id(self, commit_id):
315 if commit_id and commit_id.isdigit():
315 if commit_id and commit_id.isdigit():
316 if int(commit_id) <= len(self.commit_ids):
316 if int(commit_id) <= len(self.commit_ids):
317 return commit_id
317 return commit_id
318 else:
318 else:
319 raise CommitDoesNotExistError(
319 raise CommitDoesNotExistError(
320 "Commit %s does not exist." % (commit_id, ))
320 "Commit %s does not exist." % (commit_id, ))
321 if commit_id not in [
321 if commit_id not in [
322 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
322 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
323 raise CommitDoesNotExistError(
323 raise CommitDoesNotExistError(
324 "Commit id %s not understood." % (commit_id, ))
324 "Commit id %s not understood." % (commit_id, ))
325 svn_rev = self._remote.lookup('HEAD')
325 svn_rev = self._remote.lookup('HEAD')
326 return str(svn_rev)
326 return str(svn_rev)
327
327
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Return a ``SubversionDiff`` between ``commit1`` and ``commit2``.

        :param path: limit the diff to this path in ``commit2``.
        :param path1: optional path in ``commit1``; the backend pairs it
            with ``path``.
        :param ignore_whitespace: skip whitespace-only changes.
        :param context: number of context lines around each hunk.
        """
        self._validate_diff_commits(commit1, commit2)
        # raw_id is the svn revision number as a string (py2 `long`).
        svn_rev1 = long(commit1.raw_id)
        svn_rev2 = long(commit2.raw_id)
        diff = self._remote.diff(
            svn_rev1, svn_rev2, path1=path1, path2=path,
            ignore_whitespace=ignore_whitespace, context=context)
        return SubversionDiff(diff)
338
338
339
339
340 def _sanitize_url(url):
340 def _sanitize_url(url):
341 if '://' not in url:
341 if '://' not in url:
342 url = 'file://' + urllib.pathname2url(url)
342 url = 'file://' + urllib.pathname2url(url)
343 return url
343 return url
@@ -1,833 +1,833 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest)
51 PullRequest)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
class UserTemp(object):
    """Lightweight stand-in that carries only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
64
64
65
65
class RepoTemp(object):
    """Lightweight stand-in that carries only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
72
72
73
73
class SimpleCachedRepoList(object):
    """
    Iterate repositories as plain dictionaries built from cached DB data,
    skipping SCM backend initialisation entirely.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in order_by means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission is checked per repository, at iteration time
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker
128 self.perm_checker = perm_checker
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 checker = self.perm_checker(*self.perm_set)
138 checker = self.perm_checker(*self.perm_set)
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 name = getattr(db_obj, self.obj_attr, None)
141 name = getattr(db_obj, self.obj_attr, None)
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160
160
161
161
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173
173
174
174
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        # largefiles extension is disabled for the scan pass
        config.set('extensions', 'largefiles', '')
        repos = {}

        # path appears to be a (backend_alias, repo_path) pair -- see the
        # path[0]/path[1] usage below; confirm against get_filesystem_repos
        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # unreadable entries are skipped rather than aborting scan
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
235
235
236 def get_repos(self, all_repos=None, sort_key=None):
236 def get_repos(self, all_repos=None, sort_key=None):
237 """
237 """
238 Get all repositories from db and for each repo create it's
238 Get all repositories from db and for each repo create it's
239 backend instance and fill that backed with information from database
239 backend instance and fill that backed with information from database
240
240
241 :param all_repos: list of repository names as strings
241 :param all_repos: list of repository names as strings
242 give specific repositories list, good for filtering
242 give specific repositories list, good for filtering
243
243
244 :param sort_key: initial sorting of repositories
244 :param sort_key: initial sorting of repositories
245 """
245 """
246 if all_repos is None:
246 if all_repos is None:
247 all_repos = self.sa.query(Repository)\
247 all_repos = self.sa.query(Repository)\
248 .filter(Repository.group_id == None)\
248 .filter(Repository.group_id == None)\
249 .order_by(func.lower(Repository.repo_name)).all()
249 .order_by(func.lower(Repository.repo_name)).all()
250 repo_iter = SimpleCachedRepoList(
250 repo_iter = SimpleCachedRepoList(
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 return repo_iter
252 return repo_iter
253
253
254 def get_repo_groups(self, all_groups=None):
254 def get_repo_groups(self, all_groups=None):
255 if all_groups is None:
255 if all_groups is None:
256 all_groups = RepoGroup.query()\
256 all_groups = RepoGroup.query()\
257 .filter(RepoGroup.group_parent_id == None).all()
257 .filter(RepoGroup.group_parent_id == None).all()
258 return [x for x in RepoGroupList(all_groups)]
258 return [x for x in RepoGroupList(all_groups)]
259
259
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them, and also purge caches used by the dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        # silently no-op when the repository is unknown
        if repo:
            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
                repo_id=repo.repo_id)
            CacheKey.set_invalidate(invalidation_namespace, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            # largefiles disabled while refreshing the commit cache
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                # also purge the dogpile cache region for this repository
                cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284
284
285 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
286
286
287 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
290
290
291 if f is not None:
291 if f is not None:
292 try:
292 try:
293 self.sa.delete(f)
293 self.sa.delete(f)
294 return
294 return
295 except Exception:
295 except Exception:
296 log.error(traceback.format_exc())
296 log.error(traceback.format_exc())
297 raise
297 raise
298
298
299 try:
299 try:
300 f = UserFollowing()
300 f = UserFollowing()
301 f.user_id = user_id
301 f.user_id = user_id
302 f.follows_repo_id = follow_repo_id
302 f.follows_repo_id = follow_repo_id
303 self.sa.add(f)
303 self.sa.add(f)
304 except Exception:
304 except Exception:
305 log.error(traceback.format_exc())
305 log.error(traceback.format_exc())
306 raise
306 raise
307
307
308 def toggle_following_user(self, follow_user_id, user_id):
308 def toggle_following_user(self, follow_user_id, user_id):
309 f = self.sa.query(UserFollowing)\
309 f = self.sa.query(UserFollowing)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 .filter(UserFollowing.user_id == user_id).scalar()
311 .filter(UserFollowing.user_id == user_id).scalar()
312
312
313 if f is not None:
313 if f is not None:
314 try:
314 try:
315 self.sa.delete(f)
315 self.sa.delete(f)
316 return
316 return
317 except Exception:
317 except Exception:
318 log.error(traceback.format_exc())
318 log.error(traceback.format_exc())
319 raise
319 raise
320
320
321 try:
321 try:
322 f = UserFollowing()
322 f = UserFollowing()
323 f.user_id = user_id
323 f.user_id = user_id
324 f.follows_user_id = follow_user_id
324 f.follows_user_id = follow_user_id
325 self.sa.add(f)
325 self.sa.add(f)
326 except Exception:
326 except Exception:
327 log.error(traceback.format_exc())
327 log.error(traceback.format_exc())
328 raise
328 raise
329
329
330 def is_following_repo(self, repo_name, user_id, cache=False):
330 def is_following_repo(self, repo_name, user_id, cache=False):
331 r = self.sa.query(Repository)\
331 r = self.sa.query(Repository)\
332 .filter(Repository.repo_name == repo_name).scalar()
332 .filter(Repository.repo_name == repo_name).scalar()
333
333
334 f = self.sa.query(UserFollowing)\
334 f = self.sa.query(UserFollowing)\
335 .filter(UserFollowing.follows_repository == r)\
335 .filter(UserFollowing.follows_repository == r)\
336 .filter(UserFollowing.user_id == user_id).scalar()
336 .filter(UserFollowing.user_id == user_id).scalar()
337
337
338 return f is not None
338 return f is not None
339
339
340 def is_following_user(self, username, user_id, cache=False):
340 def is_following_user(self, username, user_id, cache=False):
341 u = User.get_by_username(username)
341 u = User.get_by_username(username)
342
342
343 f = self.sa.query(UserFollowing)\
343 f = self.sa.query(UserFollowing)\
344 .filter(UserFollowing.follows_user == u)\
344 .filter(UserFollowing.follows_user == u)\
345 .filter(UserFollowing.user_id == user_id).scalar()
345 .filter(UserFollowing.user_id == user_id).scalar()
346
346
347 return f is not None
347 return f is not None
348
348
349 def get_followers(self, repo):
349 def get_followers(self, repo):
350 repo = self._get_repo(repo)
350 repo = self._get_repo(repo)
351
351
352 return self.sa.query(UserFollowing)\
352 return self.sa.query(UserFollowing)\
353 .filter(UserFollowing.follows_repository == repo).count()
353 .filter(UserFollowing.follows_repository == repo).count()
354
354
355 def get_forks(self, repo):
355 def get_forks(self, repo):
356 repo = self._get_repo(repo)
356 repo = self._get_repo(repo)
357 return self.sa.query(Repository)\
357 return self.sa.query(Repository)\
358 .filter(Repository.fork == repo).count()
358 .filter(Repository.fork == repo).count()
359
359
360 def get_pull_requests(self, repo):
360 def get_pull_requests(self, repo):
361 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
362 return self.sa.query(PullRequest)\
362 return self.sa.query(PullRequest)\
363 .filter(PullRequest.target_repo == repo)\
363 .filter(PullRequest.target_repo == repo)\
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365
365
366 def mark_as_fork(self, repo, fork, user):
366 def mark_as_fork(self, repo, fork, user):
367 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
368 fork = self._get_repo(fork)
368 fork = self._get_repo(fork)
369 if fork and repo.repo_id == fork.repo_id:
369 if fork and repo.repo_id == fork.repo_id:
370 raise Exception("Cannot set repository as fork of itself")
370 raise Exception("Cannot set repository as fork of itself")
371
371
372 if fork and repo.repo_type != fork.repo_type:
372 if fork and repo.repo_type != fork.repo_type:
373 raise RepositoryError(
373 raise RepositoryError(
374 "Cannot set repository as fork of repository with other type")
374 "Cannot set repository as fork of repository with other type")
375
375
376 repo.fork = fork
376 repo.fork = fork
377 self.sa.add(repo)
377 self.sa.add(repo)
378 return repo
378 return repo
379
379
    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Fetch changes from a remote location into the given repository.

        :param repo: repository name, id or Repository instance
        :param username: acting username (kept for interface compatibility)
        :param remote_uri: explicit remote to fetch from; falls back to the
            repository's stored clone_uri when not given
        :param validate_uri: when True, validate `remote_uri` against the
            allowed url schemes before fetching
        :raises Exception: when no remote uri is available
        :raises InvalidCloneUrl: when validation of the remote uri fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # run without hooks; this is a sync operation, not a user push
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri)

            # fetched commits invalidate any cached scm state for this repo
            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise
408
408
    def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
        """
        Push local changes of the repository to a remote location.

        :param repo: repository name, id or Repository instance
        :param username: acting username (kept for interface compatibility)
        :param remote_uri: explicit remote to push to; falls back to the
            repository's stored push_uri when not given
        :param validate_uri: when True, validate `remote_uri` against the
            allowed url schemes before pushing
        :raises Exception: when no remote uri is available
        :raises InvalidCloneUrl: when validation of the remote uri fails
        """
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            # NOTE(review): message says "clone uri" but the check is on
            # push_uri — possibly misleading to users; confirm before changing
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # run without hooks; this is a sync operation, not a user push
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls
            # this is due this tasks can be executed via scheduler without
            # proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise
433
433
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commit a single file change on top of an existing commit.

        :param repo: SCM (vcs backend) instance, not a db Repository
        :param repo_name: repository name, used for cache invalidation and
            the post-push hook
        :param commit: parent commit object the change is based on
        :param user: RhodeCode User object or user_id of the committer
        :param author: author string of the new commit
        :param message: commit message
        :param content: new file content
        :param f_path: path of the file being changed
        :raises IMCCommitError: when the in-memory commit fails
        :returns: the newly created tip commit
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
471
471
472 def _sanitize_path(self, f_path):
472 def _sanitize_path(self, f_path):
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 raise NonRelativePathError('%s is not an relative path' % f_path)
474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 if f_path:
475 if f_path:
476 f_path = os.path.normpath(f_path)
476 f_path = os.path.normpath(f_path)
477 return f_path
477 return f_path
478
478
    def get_dirnode_metadata(self, request, commit, dir_node):
        """
        Collect display metadata for each file directly inside a dir node.

        :param request: current request, used for gravatar rendering
        :param commit: commit the node belongs to (unused here, kept for
            interface compatibility)
        :param dir_node: directory node to inspect
        :returns: list of dicts with name/size/last-modified/author info,
            or an empty list when `dir_node` is not a directory
        """
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # skip anything that is not a file (sub-directories etc.)
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                # escape user-controlled strings before they hit templates
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data
505
505
    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursively walk `root_path` at the given commit and return all
        directory and file paths found, based on the repository walk
        function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: when True return plain path strings; when False return
            dicts with name/type and (optionally) extended info and content
        :param extended_info: include md5/size/mimetype etc. (non-flat only)
        :param content: include file content (non-flat only)
        :param max_file_bytes: will not return file contents over this limit
        :raises RepositoryError: propagated from the repository walk
        :returns: tuple of (dirs, files)
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            # walk expects a repo-relative path
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None  # NOTE(review): unused local
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            # content is omitted for binary files and files
                            # over the configured size limit
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                        if extended_info:
                            # directories carry no file-level metadata
                            _data.update({
                                "md5": None,
                                "binary": None,
                                "size": None,
                                "extension": None,
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files
580
580
    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; when empty this becomes the
            initial commit
        :param author: author of commit, can be different from the
            committer — only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            # NOTE(review): f_path is sanitized (normpath) before the dict
            # lookup below — a key that normpath alters would KeyError here;
            # verify callers pass already-normalized keys
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            # (Python 2 only: basestring/file/cStringIO)
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # new commit invalidates any cached scm state for this repo
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
649
649
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits add/delete/modify operations for multiple nodes into repo.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {old_filename: {'filename': new_name,
            'content': content, 'op': 'add'|'del'|'mod'}, ...}
        :param parent_commit: parent commit; when empty this becomes the
            initial commit
        :param author: author of commit, can be different from the
            committer — only for git
        :param trigger_push_hook: trigger push hooks
        :raises NodeNotChangedError: when a 'mod' op changes nothing
        :raises IMCCommitError: on any other commit failure
        """
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    # rename is emulated as remove(old) + add(new)
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
714
714
    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes from `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; when empty this becomes the
            initial commit
        :param author: author of commit, can be different from the
            committer — only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatabilty it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        # removed files invalidate any cached scm state for this repo
        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip
775
775
    def strip(self, repo, commit_id, branch):
        """
        Strip the given commit from the repository via the backend's
        strip operation.

        :param repo: RhodeCode Repository object
        :param commit_id: commit to strip
        :param branch: branch the commit lives on
        """
        scm_instance = repo.scm_instance(cache=False)
        # run without hooks; stripping is an admin/maintenance operation
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        # cached state may reference removed commits — drop it
        self.mark_for_invalidation(repo.repo_name)
781
781
782 def get_unread_journal(self):
782 def get_unread_journal(self):
783 return self.sa.query(UserLog).count()
783 return self.sa.query(UserLog).count()
784
784
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks
        (bookmarks for hg only), grouped by type, for picking a landing
        revision.

        :param translator: translation function used for group labels
        :param repo: repository name, id or Repository instance; when
            missing only the default 'rev:tip' choice is returned
        :returns: tuple of (choices, hist_l) where choices is a flat list
            of option values and hist_l the grouped option structure
        """
        _ = translator
        repo = self._get_repo(repo)

        # 'rev:tip' is always available as the default landing rev
        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            # bookmarks exist only in mercurial
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
830
830
831 def get_server_info(self, environ=None):
831 def get_server_info(self, environ=None):
832 server_info = get_system_info(environ)
832 server_info = get_system_info(environ)
833 return server_info
833 return server_info
@@ -1,1865 +1,1867 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.function.backends.args
142 backends = metafunc.function.backends.args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.tests.other import example_rcextensions
157 from rhodecode.tests.other import example_rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = example_rcextensions
160 rhodecode.EXTENSIONS = example_rcextensions
161
161
162 @request.addfinalizer
162 @request.addfinalizer
163 def cleanup():
163 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
164 rhodecode.EXTENSIONS = old_extensions
165
165
166
166
167 @pytest.fixture
167 @pytest.fixture
168 def capture_rcextensions():
168 def capture_rcextensions():
169 """
169 """
170 Returns the recorded calls to entry points in rcextensions.
170 Returns the recorded calls to entry points in rcextensions.
171 """
171 """
172 calls = rhodecode.EXTENSIONS.calls
172 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
173 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
174 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
175 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
176 # is enough to make it work.
177 return calls
177 return calls
178
178
179
179
180 @pytest.fixture(scope='session')
180 @pytest.fixture(scope='session')
181 def http_environ_session():
181 def http_environ_session():
182 """
182 """
183 Allow to use "http_environ" in session scope.
183 Allow to use "http_environ" in session scope.
184 """
184 """
185 return http_environ(
185 return http_environ(
186 http_host_stub=http_host_stub())
186 http_host_stub=http_host_stub())
187
187
188
188
189 @pytest.fixture
189 @pytest.fixture
190 def http_host_stub():
190 def http_host_stub():
191 """
191 """
192 Value of HTTP_HOST in the test run.
192 Value of HTTP_HOST in the test run.
193 """
193 """
194 return 'example.com:80'
194 return 'example.com:80'
195
195
196
196
197 @pytest.fixture
197 @pytest.fixture
198 def http_host_only_stub():
198 def http_host_only_stub():
199 """
199 """
200 Value of HTTP_HOST in the test run.
200 Value of HTTP_HOST in the test run.
201 """
201 """
202 return http_host_stub().split(':')[0]
202 return http_host_stub().split(':')[0]
203
203
204
204
205 @pytest.fixture
205 @pytest.fixture
206 def http_environ(http_host_stub):
206 def http_environ(http_host_stub):
207 """
207 """
208 HTTP extra environ keys.
208 HTTP extra environ keys.
209
209
210 User by the test application and as well for setting up the pylons
210 User by the test application and as well for setting up the pylons
211 environment. In the case of the fixture "app" it should be possible
211 environment. In the case of the fixture "app" it should be possible
212 to override this for a specific test case.
212 to override this for a specific test case.
213 """
213 """
214 return {
214 return {
215 'SERVER_NAME': http_host_only_stub(),
215 'SERVER_NAME': http_host_only_stub(),
216 'SERVER_PORT': http_host_stub.split(':')[1],
216 'SERVER_PORT': http_host_stub.split(':')[1],
217 'HTTP_HOST': http_host_stub,
217 'HTTP_HOST': http_host_stub,
218 'HTTP_USER_AGENT': 'rc-test-agent',
218 'HTTP_USER_AGENT': 'rc-test-agent',
219 'REQUEST_METHOD': 'GET'
219 'REQUEST_METHOD': 'GET'
220 }
220 }
221
221
222
222
223 @pytest.fixture(scope='session')
223 @pytest.fixture(scope='session')
224 def baseapp(ini_config, vcsserver, http_environ_session):
224 def baseapp(ini_config, vcsserver, http_environ_session):
225 from rhodecode.lib.pyramid_utils import get_app_config
225 from rhodecode.lib.pyramid_utils import get_app_config
226 from rhodecode.config.middleware import make_pyramid_app
226 from rhodecode.config.middleware import make_pyramid_app
227
227
228 print("Using the RhodeCode configuration:{}".format(ini_config))
228 print("Using the RhodeCode configuration:{}".format(ini_config))
229 pyramid.paster.setup_logging(ini_config)
229 pyramid.paster.setup_logging(ini_config)
230
230
231 settings = get_app_config(ini_config)
231 settings = get_app_config(ini_config)
232 app = make_pyramid_app({'__file__': ini_config}, **settings)
232 app = make_pyramid_app({'__file__': ini_config}, **settings)
233
233
234 return app
234 return app
235
235
236
236
237 @pytest.fixture(scope='function')
237 @pytest.fixture(scope='function')
238 def app(request, config_stub, baseapp, http_environ):
238 def app(request, config_stub, baseapp, http_environ):
239 app = CustomTestApp(
239 app = CustomTestApp(
240 baseapp,
240 baseapp,
241 extra_environ=http_environ)
241 extra_environ=http_environ)
242 if request.cls:
242 if request.cls:
243 request.cls.app = app
243 request.cls.app = app
244 return app
244 return app
245
245
246
246
247 @pytest.fixture(scope='session')
247 @pytest.fixture(scope='session')
248 def app_settings(baseapp, ini_config):
248 def app_settings(baseapp, ini_config):
249 """
249 """
250 Settings dictionary used to create the app.
250 Settings dictionary used to create the app.
251
251
252 Parses the ini file and passes the result through the sanitize and apply
252 Parses the ini file and passes the result through the sanitize and apply
253 defaults mechanism in `rhodecode.config.middleware`.
253 defaults mechanism in `rhodecode.config.middleware`.
254 """
254 """
255 return baseapp.config.get_settings()
255 return baseapp.config.get_settings()
256
256
257
257
258 @pytest.fixture(scope='session')
258 @pytest.fixture(scope='session')
259 def db_connection(ini_settings):
259 def db_connection(ini_settings):
260 # Initialize the database connection.
260 # Initialize the database connection.
261 config_utils.initialize_database(ini_settings)
261 config_utils.initialize_database(ini_settings)
262
262
263
263
264 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
264 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
265
265
266
266
267 def _autologin_user(app, *args):
267 def _autologin_user(app, *args):
268 session = login_user_session(app, *args)
268 session = login_user_session(app, *args)
269 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
269 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
270 return LoginData(csrf_token, session['rhodecode_user'])
270 return LoginData(csrf_token, session['rhodecode_user'])
271
271
272
272
273 @pytest.fixture
273 @pytest.fixture
274 def autologin_user(app):
274 def autologin_user(app):
275 """
275 """
276 Utility fixture which makes sure that the admin user is logged in
276 Utility fixture which makes sure that the admin user is logged in
277 """
277 """
278 return _autologin_user(app)
278 return _autologin_user(app)
279
279
280
280
281 @pytest.fixture
281 @pytest.fixture
282 def autologin_regular_user(app):
282 def autologin_regular_user(app):
283 """
283 """
284 Utility fixture which makes sure that the regular user is logged in
284 Utility fixture which makes sure that the regular user is logged in
285 """
285 """
286 return _autologin_user(
286 return _autologin_user(
287 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
287 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
288
288
289
289
290 @pytest.fixture(scope='function')
290 @pytest.fixture(scope='function')
291 def csrf_token(request, autologin_user):
291 def csrf_token(request, autologin_user):
292 return autologin_user.csrf_token
292 return autologin_user.csrf_token
293
293
294
294
295 @pytest.fixture(scope='function')
295 @pytest.fixture(scope='function')
296 def xhr_header(request):
296 def xhr_header(request):
297 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
297 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
298
298
299
299
300 @pytest.fixture
300 @pytest.fixture
301 def real_crypto_backend(monkeypatch):
301 def real_crypto_backend(monkeypatch):
302 """
302 """
303 Switch the production crypto backend on for this test.
303 Switch the production crypto backend on for this test.
304
304
305 During the test run the crypto backend is replaced with a faster
305 During the test run the crypto backend is replaced with a faster
306 implementation based on the MD5 algorithm.
306 implementation based on the MD5 algorithm.
307 """
307 """
308 monkeypatch.setattr(rhodecode, 'is_test', False)
308 monkeypatch.setattr(rhodecode, 'is_test', False)
309
309
310
310
311 @pytest.fixture(scope='class')
311 @pytest.fixture(scope='class')
312 def index_location(request, baseapp):
312 def index_location(request, baseapp):
313 index_location = baseapp.config.get_settings()['search.location']
313 index_location = baseapp.config.get_settings()['search.location']
314 if request.cls:
314 if request.cls:
315 request.cls.index_location = index_location
315 request.cls.index_location = index_location
316 return index_location
316 return index_location
317
317
318
318
319 @pytest.fixture(scope='session', autouse=True)
319 @pytest.fixture(scope='session', autouse=True)
320 def tests_tmp_path(request):
320 def tests_tmp_path(request):
321 """
321 """
322 Create temporary directory to be used during the test session.
322 Create temporary directory to be used during the test session.
323 """
323 """
324 if not os.path.exists(TESTS_TMP_PATH):
324 if not os.path.exists(TESTS_TMP_PATH):
325 os.makedirs(TESTS_TMP_PATH)
325 os.makedirs(TESTS_TMP_PATH)
326
326
327 if not request.config.getoption('--keep-tmp-path'):
327 if not request.config.getoption('--keep-tmp-path'):
328 @request.addfinalizer
328 @request.addfinalizer
329 def remove_tmp_path():
329 def remove_tmp_path():
330 shutil.rmtree(TESTS_TMP_PATH)
330 shutil.rmtree(TESTS_TMP_PATH)
331
331
332 return TESTS_TMP_PATH
332 return TESTS_TMP_PATH
333
333
334
334
335 @pytest.fixture
335 @pytest.fixture
336 def test_repo_group(request):
336 def test_repo_group(request):
337 """
337 """
338 Create a temporary repository group, and destroy it after
338 Create a temporary repository group, and destroy it after
339 usage automatically
339 usage automatically
340 """
340 """
341 fixture = Fixture()
341 fixture = Fixture()
342 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
342 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
343 repo_group = fixture.create_repo_group(repogroupid)
343 repo_group = fixture.create_repo_group(repogroupid)
344
344
345 def _cleanup():
345 def _cleanup():
346 fixture.destroy_repo_group(repogroupid)
346 fixture.destroy_repo_group(repogroupid)
347
347
348 request.addfinalizer(_cleanup)
348 request.addfinalizer(_cleanup)
349 return repo_group
349 return repo_group
350
350
351
351
352 @pytest.fixture
352 @pytest.fixture
353 def test_user_group(request):
353 def test_user_group(request):
354 """
354 """
355 Create a temporary user group, and destroy it after
355 Create a temporary user group, and destroy it after
356 usage automatically
356 usage automatically
357 """
357 """
358 fixture = Fixture()
358 fixture = Fixture()
359 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
359 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
360 user_group = fixture.create_user_group(usergroupid)
360 user_group = fixture.create_user_group(usergroupid)
361
361
362 def _cleanup():
362 def _cleanup():
363 fixture.destroy_user_group(user_group)
363 fixture.destroy_user_group(user_group)
364
364
365 request.addfinalizer(_cleanup)
365 request.addfinalizer(_cleanup)
366 return user_group
366 return user_group
367
367
368
368
369 @pytest.fixture(scope='session')
369 @pytest.fixture(scope='session')
370 def test_repo(request):
370 def test_repo(request):
371 container = TestRepoContainer()
371 container = TestRepoContainer()
372 request.addfinalizer(container._cleanup)
372 request.addfinalizer(container._cleanup)
373 return container
373 return container
374
374
375
375
376 class TestRepoContainer(object):
376 class TestRepoContainer(object):
377 """
377 """
378 Container for test repositories which are used read only.
378 Container for test repositories which are used read only.
379
379
380 Repositories will be created on demand and re-used during the lifetime
380 Repositories will be created on demand and re-used during the lifetime
381 of this object.
381 of this object.
382
382
383 Usage to get the svn test repository "minimal"::
383 Usage to get the svn test repository "minimal"::
384
384
385 test_repo = TestContainer()
385 test_repo = TestContainer()
386 repo = test_repo('minimal', 'svn')
386 repo = test_repo('minimal', 'svn')
387
387
388 """
388 """
389
389
390 dump_extractors = {
390 dump_extractors = {
391 'git': utils.extract_git_repo_from_dump,
391 'git': utils.extract_git_repo_from_dump,
392 'hg': utils.extract_hg_repo_from_dump,
392 'hg': utils.extract_hg_repo_from_dump,
393 'svn': utils.extract_svn_repo_from_dump,
393 'svn': utils.extract_svn_repo_from_dump,
394 }
394 }
395
395
396 def __init__(self):
396 def __init__(self):
397 self._cleanup_repos = []
397 self._cleanup_repos = []
398 self._fixture = Fixture()
398 self._fixture = Fixture()
399 self._repos = {}
399 self._repos = {}
400
400
401 def __call__(self, dump_name, backend_alias, config=None):
401 def __call__(self, dump_name, backend_alias, config=None):
402 key = (dump_name, backend_alias)
402 key = (dump_name, backend_alias)
403 if key not in self._repos:
403 if key not in self._repos:
404 repo = self._create_repo(dump_name, backend_alias, config)
404 repo = self._create_repo(dump_name, backend_alias, config)
405 self._repos[key] = repo.repo_id
405 self._repos[key] = repo.repo_id
406 return Repository.get(self._repos[key])
406 return Repository.get(self._repos[key])
407
407
408 def _create_repo(self, dump_name, backend_alias, config):
408 def _create_repo(self, dump_name, backend_alias, config):
409 repo_name = '%s-%s' % (backend_alias, dump_name)
409 repo_name = '%s-%s' % (backend_alias, dump_name)
410 backend_class = get_backend(backend_alias)
410 backend_class = get_backend(backend_alias)
411 dump_extractor = self.dump_extractors[backend_alias]
411 dump_extractor = self.dump_extractors[backend_alias]
412 repo_path = dump_extractor(dump_name, repo_name)
412 repo_path = dump_extractor(dump_name, repo_name)
413
413
414 vcs_repo = backend_class(repo_path, config=config)
414 vcs_repo = backend_class(repo_path, config=config)
415 repo2db_mapper({repo_name: vcs_repo})
415 repo2db_mapper({repo_name: vcs_repo})
416
416
417 repo = RepoModel().get_by_repo_name(repo_name)
417 repo = RepoModel().get_by_repo_name(repo_name)
418 self._cleanup_repos.append(repo_name)
418 self._cleanup_repos.append(repo_name)
419 return repo
419 return repo
420
420
421 def _cleanup(self):
421 def _cleanup(self):
422 for repo_name in reversed(self._cleanup_repos):
422 for repo_name in reversed(self._cleanup_repos):
423 self._fixture.destroy_repo(repo_name)
423 self._fixture.destroy_repo(repo_name)
424
424
425
425
426 @pytest.fixture
426 @pytest.fixture
427 def backend(request, backend_alias, baseapp, test_repo):
427 def backend(request, backend_alias, baseapp, test_repo):
428 """
428 """
429 Parametrized fixture which represents a single backend implementation.
429 Parametrized fixture which represents a single backend implementation.
430
430
431 It respects the option `--backends` to focus the test run on specific
431 It respects the option `--backends` to focus the test run on specific
432 backend implementations.
432 backend implementations.
433
433
434 It also supports `pytest.mark.xfail_backends` to mark tests as failing
434 It also supports `pytest.mark.xfail_backends` to mark tests as failing
435 for specific backends. This is intended as a utility for incremental
435 for specific backends. This is intended as a utility for incremental
436 development of a new backend implementation.
436 development of a new backend implementation.
437 """
437 """
438 if backend_alias not in request.config.getoption('--backends'):
438 if backend_alias not in request.config.getoption('--backends'):
439 pytest.skip("Backend %s not selected." % (backend_alias, ))
439 pytest.skip("Backend %s not selected." % (backend_alias, ))
440
440
441 utils.check_xfail_backends(request.node, backend_alias)
441 utils.check_xfail_backends(request.node, backend_alias)
442 utils.check_skip_backends(request.node, backend_alias)
442 utils.check_skip_backends(request.node, backend_alias)
443
443
444 repo_name = 'vcs_test_%s' % (backend_alias, )
444 repo_name = 'vcs_test_%s' % (backend_alias, )
445 backend = Backend(
445 backend = Backend(
446 alias=backend_alias,
446 alias=backend_alias,
447 repo_name=repo_name,
447 repo_name=repo_name,
448 test_name=request.node.name,
448 test_name=request.node.name,
449 test_repo_container=test_repo)
449 test_repo_container=test_repo)
450 request.addfinalizer(backend.cleanup)
450 request.addfinalizer(backend.cleanup)
451 return backend
451 return backend
452
452
453
453
454 @pytest.fixture
454 @pytest.fixture
455 def backend_git(request, baseapp, test_repo):
455 def backend_git(request, baseapp, test_repo):
456 return backend(request, 'git', baseapp, test_repo)
456 return backend(request, 'git', baseapp, test_repo)
457
457
458
458
459 @pytest.fixture
459 @pytest.fixture
460 def backend_hg(request, baseapp, test_repo):
460 def backend_hg(request, baseapp, test_repo):
461 return backend(request, 'hg', baseapp, test_repo)
461 return backend(request, 'hg', baseapp, test_repo)
462
462
463
463
464 @pytest.fixture
464 @pytest.fixture
465 def backend_svn(request, baseapp, test_repo):
465 def backend_svn(request, baseapp, test_repo):
466 return backend(request, 'svn', baseapp, test_repo)
466 return backend(request, 'svn', baseapp, test_repo)
467
467
468
468
469 @pytest.fixture
469 @pytest.fixture
470 def backend_random(backend_git):
470 def backend_random(backend_git):
471 """
471 """
472 Use this to express that your tests need "a backend.
472 Use this to express that your tests need "a backend.
473
473
474 A few of our tests need a backend, so that we can run the code. This
474 A few of our tests need a backend, so that we can run the code. This
475 fixture is intended to be used for such cases. It will pick one of the
475 fixture is intended to be used for such cases. It will pick one of the
476 backends and run the tests.
476 backends and run the tests.
477
477
478 The fixture `backend` would run the test multiple times for each
478 The fixture `backend` would run the test multiple times for each
479 available backend which is a pure waste of time if the test is
479 available backend which is a pure waste of time if the test is
480 independent of the backend type.
480 independent of the backend type.
481 """
481 """
482 # TODO: johbo: Change this to pick a random backend
482 # TODO: johbo: Change this to pick a random backend
483 return backend_git
483 return backend_git
484
484
485
485
486 @pytest.fixture
486 @pytest.fixture
487 def backend_stub(backend_git):
487 def backend_stub(backend_git):
488 """
488 """
489 Use this to express that your tests need a backend stub
489 Use this to express that your tests need a backend stub
490
490
491 TODO: mikhail: Implement a real stub logic instead of returning
491 TODO: mikhail: Implement a real stub logic instead of returning
492 a git backend
492 a git backend
493 """
493 """
494 return backend_git
494 return backend_git
495
495
496
496
497 @pytest.fixture
497 @pytest.fixture
498 def repo_stub(backend_stub):
498 def repo_stub(backend_stub):
499 """
499 """
500 Use this to express that your tests need a repository stub
500 Use this to express that your tests need a repository stub
501 """
501 """
502 return backend_stub.create_repo()
502 return backend_stub.create_repo()
503
503
504
504
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # Class-level defaults; shadowed per instance in ``__init__`` so that
    # separate Backend instances never share mutable state.
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # Fix: give every instance its own commit-id map and master repo
        # reference. The class attributes above are mutable defaults which
        # would otherwise be shared between instances.
        self._master_repo = None
        self._commit_ids = {}
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name plus the
        # number of repositories created so far.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # Iterate items directly instead of re-looking-up each key.
        for ref_name, commit_id in refs.items():
            repo.set_refs(ref_name, commit_id)
675
676
676
677
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
703
704
704
705
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-specific variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)
708
709
709
710
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-specific variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)
713
714
714
715
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-specific variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)
718
719
719
720
@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git
731
732
732
733
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
741
742
742
743
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a new vcs repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances; one commit is
            created per entry.
        :param number_of_commits: Optional. Number of generated commits to add
            when `commits` is not given.
        :param _clone_repo: Optional. Existing vcs repository to clone from.
        :param bare: set a repo as bare (no checkout)
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            # `range` for consistency with Backend.create_repo above
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derive a unique, filesystem-safe name from the test name plus the
        # number of repositories created so far.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
819
821
820
822
def _add_commits_to_repo(vcs_repo, commits):
    """
    Add every commit described in `commits` (a sequence of dicts) to
    `vcs_repo` and return a map from commit message to raw_id.
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    for idx, spec in enumerate(commits):
        message = unicode(spec.get('message', 'Commit %s' % idx))

        for node in spec.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in spec.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in spec.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in spec.get('parents', [])]

        # Guarantee a non-empty commit when no file operation was requested.
        if not any(spec.get(op) for op in ('added', 'changed', 'removed')):
            imc.add(FileNode('file_%s' % idx, content=message))

        new_commit = imc.commit(
            message=message,
            author=unicode(spec.get('author', 'Automatic')),
            date=spec.get('date'),
            branch=spec.get('branch'),
            parents=parents)

        commit_ids[new_commit.message] = new_commit.raw_id

    return commit_ids
857
859
858
860
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
868
870
869
871
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the served repository is reachable; set by `serve`.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start an svnserve daemon exposing `vcsrepo` on localhost."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started via `serve`."""
        for proc in self._cleanup_servers:
            proc.terminate()
895
897
896
898
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    pr_helper = PRTestUtility(backend)
    request.addfinalizer(pr_helper.cleanup)
    return pr_helper
913
915
class PRTestUtility(object):
    """Helper used by the `pr_util` fixture to drive one pull request
    through model-level test scenarios."""

    # Shared state; populated by ``create_pull_request`` and the helper
    # methods defined below.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None
921
923
922 def __init__(self, backend):
924 def __init__(self, backend):
923 self.backend = backend
925 self.backend = backend
924
926
    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request for this utility instance.

        On the first call this creates a master repository from `commits`,
        derives target and source repositories from it and creates the pull
        request via :class:`PullRequestModel`. Later calls return the already
        created pull request unchanged.

        :param commits: commit dicts for the master repo; defaults to three
            commits 'c1'..'c3' with target 'c1', source/revisions 'c2'.
        :param target_head: commit message naming the target repo head.
        :param source_head: commit message naming the source repo head.
        :param revisions: commit messages included in the pull request.
        :param approved: when True, approve the pull request after creation.
        :param author: PR author; defaults to the admin test user.
        :param mergeable: forwarded to ``set_mergeable``.
        :param enable_notifications: when False, notification creation is
            mocked out.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request
980
982
981 def approve(self):
983 def approve(self):
982 self.create_status_votes(
984 self.create_status_votes(
983 ChangesetStatus.STATUS_APPROVED,
985 ChangesetStatus.STATUS_APPROVED,
984 *self.pull_request.reviewers)
986 *self.pull_request.reviewers)
985
987
986 def close(self):
988 def close(self):
987 PullRequestModel().close_pull_request(self.pull_request, self.author)
989 PullRequestModel().close_pull_request(self.pull_request, self.author)
988
990
989 def _default_branch_reference(self, commit_message):
991 def _default_branch_reference(self, commit_message):
990 reference = '%s:%s:%s' % (
992 reference = '%s:%s:%s' % (
991 'branch',
993 'branch',
992 self.backend.default_branch_name,
994 self.backend.default_branch_name,
993 self.commit_ids[commit_message])
995 self.commit_ids[commit_message])
994 return reference
996 return reference
995
997
996 def _get_reviewers(self):
998 def _get_reviewers(self):
997 return [
999 return [
998 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1000 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
999 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1001 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1000 ]
1002 ]
1001
1003
1002 def update_source_repository(self, head=None):
1004 def update_source_repository(self, head=None):
1003 heads = [head or 'c3']
1005 heads = [head or 'c3']
1004 self.backend.pull_heads(self.source_repository, heads=heads)
1006 self.backend.pull_heads(self.source_repository, heads=heads)
1005
1007
1006 def add_one_commit(self, head=None):
1008 def add_one_commit(self, head=None):
1007 self.update_source_repository(head=head)
1009 self.update_source_repository(head=head)
1008 old_commit_ids = set(self.pull_request.revisions)
1010 old_commit_ids = set(self.pull_request.revisions)
1009 PullRequestModel().update_commits(self.pull_request)
1011 PullRequestModel().update_commits(self.pull_request)
1010 commit_ids = set(self.pull_request.revisions)
1012 commit_ids = set(self.pull_request.revisions)
1011 new_commit_ids = commit_ids - old_commit_ids
1013 new_commit_ids = commit_ids - old_commit_ids
1012 assert len(new_commit_ids) == 1
1014 assert len(new_commit_ids) == 1
1013 return new_commit_ids.pop()
1015 return new_commit_ids.pop()
1014
1016
1015 def remove_one_commit(self):
1017 def remove_one_commit(self):
1016 assert len(self.pull_request.revisions) == 2
1018 assert len(self.pull_request.revisions) == 2
1017 source_vcs = self.source_repository.scm_instance()
1019 source_vcs = self.source_repository.scm_instance()
1018 removed_commit_id = source_vcs.commit_ids[-1]
1020 removed_commit_id = source_vcs.commit_ids[-1]
1019
1021
1020 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1022 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1021 # remove the if once that's sorted out.
1023 # remove the if once that's sorted out.
1022 if self.backend.alias == "git":
1024 if self.backend.alias == "git":
1023 kwargs = {'branch_name': self.backend.default_branch_name}
1025 kwargs = {'branch_name': self.backend.default_branch_name}
1024 else:
1026 else:
1025 kwargs = {}
1027 kwargs = {}
1026 source_vcs.strip(removed_commit_id, **kwargs)
1028 source_vcs.strip(removed_commit_id, **kwargs)
1027
1029
1028 PullRequestModel().update_commits(self.pull_request)
1030 PullRequestModel().update_commits(self.pull_request)
1029 assert len(self.pull_request.revisions) == 1
1031 assert len(self.pull_request.revisions) == 1
1030 return removed_commit_id
1032 return removed_commit_id
1031
1033
1032 def create_comment(self, linked_to=None):
1034 def create_comment(self, linked_to=None):
1033 comment = CommentsModel().create(
1035 comment = CommentsModel().create(
1034 text=u"Test comment",
1036 text=u"Test comment",
1035 repo=self.target_repository.repo_name,
1037 repo=self.target_repository.repo_name,
1036 user=self.author,
1038 user=self.author,
1037 pull_request=self.pull_request)
1039 pull_request=self.pull_request)
1038 assert comment.pull_request_version_id is None
1040 assert comment.pull_request_version_id is None
1039
1041
1040 if linked_to:
1042 if linked_to:
1041 PullRequestModel()._link_comments_to_version(linked_to)
1043 PullRequestModel()._link_comments_to_version(linked_to)
1042
1044
1043 return comment
1045 return comment
1044
1046
1045 def create_inline_comment(
1047 def create_inline_comment(
1046 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1048 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1047 comment = CommentsModel().create(
1049 comment = CommentsModel().create(
1048 text=u"Test comment",
1050 text=u"Test comment",
1049 repo=self.target_repository.repo_name,
1051 repo=self.target_repository.repo_name,
1050 user=self.author,
1052 user=self.author,
1051 line_no=line_no,
1053 line_no=line_no,
1052 f_path=file_path,
1054 f_path=file_path,
1053 pull_request=self.pull_request)
1055 pull_request=self.pull_request)
1054 assert comment.pull_request_version_id is None
1056 assert comment.pull_request_version_id is None
1055
1057
1056 if linked_to:
1058 if linked_to:
1057 PullRequestModel()._link_comments_to_version(linked_to)
1059 PullRequestModel()._link_comments_to_version(linked_to)
1058
1060
1059 return comment
1061 return comment
1060
1062
1061 def create_version_of_pull_request(self):
1063 def create_version_of_pull_request(self):
1062 pull_request = self.create_pull_request()
1064 pull_request = self.create_pull_request()
1063 version = PullRequestModel()._create_version_from_snapshot(
1065 version = PullRequestModel()._create_version_from_snapshot(
1064 pull_request)
1066 pull_request)
1065 return version
1067 return version
1066
1068
1067 def create_status_votes(self, status, *reviewers):
1069 def create_status_votes(self, status, *reviewers):
1068 for reviewer in reviewers:
1070 for reviewer in reviewers:
1069 ChangesetStatusModel().set_status(
1071 ChangesetStatusModel().set_status(
1070 repo=self.pull_request.target_repo,
1072 repo=self.pull_request.target_repo,
1071 status=status,
1073 status=status,
1072 user=reviewer.user_id,
1074 user=reviewer.user_id,
1073 pull_request=self.pull_request)
1075 pull_request=self.pull_request)
1074
1076
1075 def set_mergeable(self, value):
1077 def set_mergeable(self, value):
1076 if not self.mergeable_patcher:
1078 if not self.mergeable_patcher:
1077 self.mergeable_patcher = mock.patch.object(
1079 self.mergeable_patcher = mock.patch.object(
1078 VcsSettingsModel, 'get_general_settings')
1080 VcsSettingsModel, 'get_general_settings')
1079 self.mergeable_mock = self.mergeable_patcher.start()
1081 self.mergeable_mock = self.mergeable_patcher.start()
1080 self.mergeable_mock.return_value = {
1082 self.mergeable_mock.return_value = {
1081 'rhodecode_pr_merge_enabled': value}
1083 'rhodecode_pr_merge_enabled': value}
1082
1084
1083 def cleanup(self):
1085 def cleanup(self):
1084 # In case the source repository is already cleaned up, the pull
1086 # In case the source repository is already cleaned up, the pull
1085 # request will already be deleted.
1087 # request will already be deleted.
1086 pull_request = PullRequest().get(self.pull_request_id)
1088 pull_request = PullRequest().get(self.pull_request_id)
1087 if pull_request:
1089 if pull_request:
1088 PullRequestModel().delete(pull_request, pull_request.author)
1090 PullRequestModel().delete(pull_request, pull_request.author)
1089 Session().commit()
1091 Session().commit()
1090
1092
1091 if self.notification_patcher:
1093 if self.notification_patcher:
1092 self.notification_patcher.stop()
1094 self.notification_patcher.stop()
1093
1095
1094 if self.mergeable_patcher:
1096 if self.mergeable_patcher:
1095 self.mergeable_patcher.stop()
1097 self.mergeable_patcher.stop()
1096
1098
1097
1099
1098 @pytest.fixture
1100 @pytest.fixture
1099 def user_admin(baseapp):
1101 def user_admin(baseapp):
1100 """
1102 """
1101 Provides the default admin test user as an instance of `db.User`.
1103 Provides the default admin test user as an instance of `db.User`.
1102 """
1104 """
1103 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1105 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1104 return user
1106 return user
1105
1107
1106
1108
1107 @pytest.fixture
1109 @pytest.fixture
1108 def user_regular(baseapp):
1110 def user_regular(baseapp):
1109 """
1111 """
1110 Provides the default regular test user as an instance of `db.User`.
1112 Provides the default regular test user as an instance of `db.User`.
1111 """
1113 """
1112 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1114 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1113 return user
1115 return user
1114
1116
1115
1117
1116 @pytest.fixture
1118 @pytest.fixture
1117 def user_util(request, db_connection):
1119 def user_util(request, db_connection):
1118 """
1120 """
1119 Provides a wired instance of `UserUtility` with integrated cleanup.
1121 Provides a wired instance of `UserUtility` with integrated cleanup.
1120 """
1122 """
1121 utility = UserUtility(test_name=request.node.name)
1123 utility = UserUtility(test_name=request.node.name)
1122 request.addfinalizer(utility.cleanup)
1124 request.addfinalizer(utility.cleanup)
1123 return utility
1125 return utility
1124
1126
1125
1127
1126 # TODO: johbo: Split this up into utilities per domain or something similar
1128 # TODO: johbo: Split this up into utilities per domain or something similar
1127 class UserUtility(object):
1129 class UserUtility(object):
1128
1130
1129 def __init__(self, test_name="test"):
1131 def __init__(self, test_name="test"):
1130 self._test_name = self._sanitize_name(test_name)
1132 self._test_name = self._sanitize_name(test_name)
1131 self.fixture = Fixture()
1133 self.fixture = Fixture()
1132 self.repo_group_ids = []
1134 self.repo_group_ids = []
1133 self.repos_ids = []
1135 self.repos_ids = []
1134 self.user_ids = []
1136 self.user_ids = []
1135 self.user_group_ids = []
1137 self.user_group_ids = []
1136 self.user_repo_permission_ids = []
1138 self.user_repo_permission_ids = []
1137 self.user_group_repo_permission_ids = []
1139 self.user_group_repo_permission_ids = []
1138 self.user_repo_group_permission_ids = []
1140 self.user_repo_group_permission_ids = []
1139 self.user_group_repo_group_permission_ids = []
1141 self.user_group_repo_group_permission_ids = []
1140 self.user_user_group_permission_ids = []
1142 self.user_user_group_permission_ids = []
1141 self.user_group_user_group_permission_ids = []
1143 self.user_group_user_group_permission_ids = []
1142 self.user_permissions = []
1144 self.user_permissions = []
1143
1145
1144 def _sanitize_name(self, name):
1146 def _sanitize_name(self, name):
1145 for char in ['[', ']']:
1147 for char in ['[', ']']:
1146 name = name.replace(char, '_')
1148 name = name.replace(char, '_')
1147 return name
1149 return name
1148
1150
1149 def create_repo_group(
1151 def create_repo_group(
1150 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1152 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1151 group_name = "{prefix}_repogroup_{count}".format(
1153 group_name = "{prefix}_repogroup_{count}".format(
1152 prefix=self._test_name,
1154 prefix=self._test_name,
1153 count=len(self.repo_group_ids))
1155 count=len(self.repo_group_ids))
1154 repo_group = self.fixture.create_repo_group(
1156 repo_group = self.fixture.create_repo_group(
1155 group_name, cur_user=owner)
1157 group_name, cur_user=owner)
1156 if auto_cleanup:
1158 if auto_cleanup:
1157 self.repo_group_ids.append(repo_group.group_id)
1159 self.repo_group_ids.append(repo_group.group_id)
1158 return repo_group
1160 return repo_group
1159
1161
1160 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1162 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1161 auto_cleanup=True, repo_type='hg'):
1163 auto_cleanup=True, repo_type='hg', bare=False):
1162 repo_name = "{prefix}_repository_{count}".format(
1164 repo_name = "{prefix}_repository_{count}".format(
1163 prefix=self._test_name,
1165 prefix=self._test_name,
1164 count=len(self.repos_ids))
1166 count=len(self.repos_ids))
1165
1167
1166 repository = self.fixture.create_repo(
1168 repository = self.fixture.create_repo(
1167 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1169 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1168 if auto_cleanup:
1170 if auto_cleanup:
1169 self.repos_ids.append(repository.repo_id)
1171 self.repos_ids.append(repository.repo_id)
1170 return repository
1172 return repository
1171
1173
1172 def create_user(self, auto_cleanup=True, **kwargs):
1174 def create_user(self, auto_cleanup=True, **kwargs):
1173 user_name = "{prefix}_user_{count}".format(
1175 user_name = "{prefix}_user_{count}".format(
1174 prefix=self._test_name,
1176 prefix=self._test_name,
1175 count=len(self.user_ids))
1177 count=len(self.user_ids))
1176 user = self.fixture.create_user(user_name, **kwargs)
1178 user = self.fixture.create_user(user_name, **kwargs)
1177 if auto_cleanup:
1179 if auto_cleanup:
1178 self.user_ids.append(user.user_id)
1180 self.user_ids.append(user.user_id)
1179 return user
1181 return user
1180
1182
1181 def create_additional_user_email(self, user, email):
1183 def create_additional_user_email(self, user, email):
1182 uem = self.fixture.create_additional_user_email(user=user, email=email)
1184 uem = self.fixture.create_additional_user_email(user=user, email=email)
1183 return uem
1185 return uem
1184
1186
1185 def create_user_with_group(self):
1187 def create_user_with_group(self):
1186 user = self.create_user()
1188 user = self.create_user()
1187 user_group = self.create_user_group(members=[user])
1189 user_group = self.create_user_group(members=[user])
1188 return user, user_group
1190 return user, user_group
1189
1191
1190 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1192 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1191 auto_cleanup=True, **kwargs):
1193 auto_cleanup=True, **kwargs):
1192 group_name = "{prefix}_usergroup_{count}".format(
1194 group_name = "{prefix}_usergroup_{count}".format(
1193 prefix=self._test_name,
1195 prefix=self._test_name,
1194 count=len(self.user_group_ids))
1196 count=len(self.user_group_ids))
1195 user_group = self.fixture.create_user_group(
1197 user_group = self.fixture.create_user_group(
1196 group_name, cur_user=owner, **kwargs)
1198 group_name, cur_user=owner, **kwargs)
1197
1199
1198 if auto_cleanup:
1200 if auto_cleanup:
1199 self.user_group_ids.append(user_group.users_group_id)
1201 self.user_group_ids.append(user_group.users_group_id)
1200 if members:
1202 if members:
1201 for user in members:
1203 for user in members:
1202 UserGroupModel().add_user_to_group(user_group, user)
1204 UserGroupModel().add_user_to_group(user_group, user)
1203 return user_group
1205 return user_group
1204
1206
1205 def grant_user_permission(self, user_name, permission_name):
1207 def grant_user_permission(self, user_name, permission_name):
1206 self._inherit_default_user_permissions(user_name, False)
1208 self._inherit_default_user_permissions(user_name, False)
1207 self.user_permissions.append((user_name, permission_name))
1209 self.user_permissions.append((user_name, permission_name))
1208
1210
1209 def grant_user_permission_to_repo_group(
1211 def grant_user_permission_to_repo_group(
1210 self, repo_group, user, permission_name):
1212 self, repo_group, user, permission_name):
1211 permission = RepoGroupModel().grant_user_permission(
1213 permission = RepoGroupModel().grant_user_permission(
1212 repo_group, user, permission_name)
1214 repo_group, user, permission_name)
1213 self.user_repo_group_permission_ids.append(
1215 self.user_repo_group_permission_ids.append(
1214 (repo_group.group_id, user.user_id))
1216 (repo_group.group_id, user.user_id))
1215 return permission
1217 return permission
1216
1218
1217 def grant_user_group_permission_to_repo_group(
1219 def grant_user_group_permission_to_repo_group(
1218 self, repo_group, user_group, permission_name):
1220 self, repo_group, user_group, permission_name):
1219 permission = RepoGroupModel().grant_user_group_permission(
1221 permission = RepoGroupModel().grant_user_group_permission(
1220 repo_group, user_group, permission_name)
1222 repo_group, user_group, permission_name)
1221 self.user_group_repo_group_permission_ids.append(
1223 self.user_group_repo_group_permission_ids.append(
1222 (repo_group.group_id, user_group.users_group_id))
1224 (repo_group.group_id, user_group.users_group_id))
1223 return permission
1225 return permission
1224
1226
1225 def grant_user_permission_to_repo(
1227 def grant_user_permission_to_repo(
1226 self, repo, user, permission_name):
1228 self, repo, user, permission_name):
1227 permission = RepoModel().grant_user_permission(
1229 permission = RepoModel().grant_user_permission(
1228 repo, user, permission_name)
1230 repo, user, permission_name)
1229 self.user_repo_permission_ids.append(
1231 self.user_repo_permission_ids.append(
1230 (repo.repo_id, user.user_id))
1232 (repo.repo_id, user.user_id))
1231 return permission
1233 return permission
1232
1234
1233 def grant_user_group_permission_to_repo(
1235 def grant_user_group_permission_to_repo(
1234 self, repo, user_group, permission_name):
1236 self, repo, user_group, permission_name):
1235 permission = RepoModel().grant_user_group_permission(
1237 permission = RepoModel().grant_user_group_permission(
1236 repo, user_group, permission_name)
1238 repo, user_group, permission_name)
1237 self.user_group_repo_permission_ids.append(
1239 self.user_group_repo_permission_ids.append(
1238 (repo.repo_id, user_group.users_group_id))
1240 (repo.repo_id, user_group.users_group_id))
1239 return permission
1241 return permission
1240
1242
1241 def grant_user_permission_to_user_group(
1243 def grant_user_permission_to_user_group(
1242 self, target_user_group, user, permission_name):
1244 self, target_user_group, user, permission_name):
1243 permission = UserGroupModel().grant_user_permission(
1245 permission = UserGroupModel().grant_user_permission(
1244 target_user_group, user, permission_name)
1246 target_user_group, user, permission_name)
1245 self.user_user_group_permission_ids.append(
1247 self.user_user_group_permission_ids.append(
1246 (target_user_group.users_group_id, user.user_id))
1248 (target_user_group.users_group_id, user.user_id))
1247 return permission
1249 return permission
1248
1250
1249 def grant_user_group_permission_to_user_group(
1251 def grant_user_group_permission_to_user_group(
1250 self, target_user_group, user_group, permission_name):
1252 self, target_user_group, user_group, permission_name):
1251 permission = UserGroupModel().grant_user_group_permission(
1253 permission = UserGroupModel().grant_user_group_permission(
1252 target_user_group, user_group, permission_name)
1254 target_user_group, user_group, permission_name)
1253 self.user_group_user_group_permission_ids.append(
1255 self.user_group_user_group_permission_ids.append(
1254 (target_user_group.users_group_id, user_group.users_group_id))
1256 (target_user_group.users_group_id, user_group.users_group_id))
1255 return permission
1257 return permission
1256
1258
1257 def revoke_user_permission(self, user_name, permission_name):
1259 def revoke_user_permission(self, user_name, permission_name):
1258 self._inherit_default_user_permissions(user_name, True)
1260 self._inherit_default_user_permissions(user_name, True)
1259 UserModel().revoke_perm(user_name, permission_name)
1261 UserModel().revoke_perm(user_name, permission_name)
1260
1262
1261 def _inherit_default_user_permissions(self, user_name, value):
1263 def _inherit_default_user_permissions(self, user_name, value):
1262 user = UserModel().get_by_username(user_name)
1264 user = UserModel().get_by_username(user_name)
1263 user.inherit_default_permissions = value
1265 user.inherit_default_permissions = value
1264 Session().add(user)
1266 Session().add(user)
1265 Session().commit()
1267 Session().commit()
1266
1268
1267 def cleanup(self):
1269 def cleanup(self):
1268 self._cleanup_permissions()
1270 self._cleanup_permissions()
1269 self._cleanup_repos()
1271 self._cleanup_repos()
1270 self._cleanup_repo_groups()
1272 self._cleanup_repo_groups()
1271 self._cleanup_user_groups()
1273 self._cleanup_user_groups()
1272 self._cleanup_users()
1274 self._cleanup_users()
1273
1275
1274 def _cleanup_permissions(self):
1276 def _cleanup_permissions(self):
1275 if self.user_permissions:
1277 if self.user_permissions:
1276 for user_name, permission_name in self.user_permissions:
1278 for user_name, permission_name in self.user_permissions:
1277 self.revoke_user_permission(user_name, permission_name)
1279 self.revoke_user_permission(user_name, permission_name)
1278
1280
1279 for permission in self.user_repo_permission_ids:
1281 for permission in self.user_repo_permission_ids:
1280 RepoModel().revoke_user_permission(*permission)
1282 RepoModel().revoke_user_permission(*permission)
1281
1283
1282 for permission in self.user_group_repo_permission_ids:
1284 for permission in self.user_group_repo_permission_ids:
1283 RepoModel().revoke_user_group_permission(*permission)
1285 RepoModel().revoke_user_group_permission(*permission)
1284
1286
1285 for permission in self.user_repo_group_permission_ids:
1287 for permission in self.user_repo_group_permission_ids:
1286 RepoGroupModel().revoke_user_permission(*permission)
1288 RepoGroupModel().revoke_user_permission(*permission)
1287
1289
1288 for permission in self.user_group_repo_group_permission_ids:
1290 for permission in self.user_group_repo_group_permission_ids:
1289 RepoGroupModel().revoke_user_group_permission(*permission)
1291 RepoGroupModel().revoke_user_group_permission(*permission)
1290
1292
1291 for permission in self.user_user_group_permission_ids:
1293 for permission in self.user_user_group_permission_ids:
1292 UserGroupModel().revoke_user_permission(*permission)
1294 UserGroupModel().revoke_user_permission(*permission)
1293
1295
1294 for permission in self.user_group_user_group_permission_ids:
1296 for permission in self.user_group_user_group_permission_ids:
1295 UserGroupModel().revoke_user_group_permission(*permission)
1297 UserGroupModel().revoke_user_group_permission(*permission)
1296
1298
1297 def _cleanup_repo_groups(self):
1299 def _cleanup_repo_groups(self):
1298 def _repo_group_compare(first_group_id, second_group_id):
1300 def _repo_group_compare(first_group_id, second_group_id):
1299 """
1301 """
1300 Gives higher priority to the groups with the most complex paths
1302 Gives higher priority to the groups with the most complex paths
1301 """
1303 """
1302 first_group = RepoGroup.get(first_group_id)
1304 first_group = RepoGroup.get(first_group_id)
1303 second_group = RepoGroup.get(second_group_id)
1305 second_group = RepoGroup.get(second_group_id)
1304 first_group_parts = (
1306 first_group_parts = (
1305 len(first_group.group_name.split('/')) if first_group else 0)
1307 len(first_group.group_name.split('/')) if first_group else 0)
1306 second_group_parts = (
1308 second_group_parts = (
1307 len(second_group.group_name.split('/')) if second_group else 0)
1309 len(second_group.group_name.split('/')) if second_group else 0)
1308 return cmp(second_group_parts, first_group_parts)
1310 return cmp(second_group_parts, first_group_parts)
1309
1311
1310 sorted_repo_group_ids = sorted(
1312 sorted_repo_group_ids = sorted(
1311 self.repo_group_ids, cmp=_repo_group_compare)
1313 self.repo_group_ids, cmp=_repo_group_compare)
1312 for repo_group_id in sorted_repo_group_ids:
1314 for repo_group_id in sorted_repo_group_ids:
1313 self.fixture.destroy_repo_group(repo_group_id)
1315 self.fixture.destroy_repo_group(repo_group_id)
1314
1316
1315 def _cleanup_repos(self):
1317 def _cleanup_repos(self):
1316 sorted_repos_ids = sorted(self.repos_ids)
1318 sorted_repos_ids = sorted(self.repos_ids)
1317 for repo_id in sorted_repos_ids:
1319 for repo_id in sorted_repos_ids:
1318 self.fixture.destroy_repo(repo_id)
1320 self.fixture.destroy_repo(repo_id)
1319
1321
1320 def _cleanup_user_groups(self):
1322 def _cleanup_user_groups(self):
1321 def _user_group_compare(first_group_id, second_group_id):
1323 def _user_group_compare(first_group_id, second_group_id):
1322 """
1324 """
1323 Gives higher priority to the groups with the most complex paths
1325 Gives higher priority to the groups with the most complex paths
1324 """
1326 """
1325 first_group = UserGroup.get(first_group_id)
1327 first_group = UserGroup.get(first_group_id)
1326 second_group = UserGroup.get(second_group_id)
1328 second_group = UserGroup.get(second_group_id)
1327 first_group_parts = (
1329 first_group_parts = (
1328 len(first_group.users_group_name.split('/'))
1330 len(first_group.users_group_name.split('/'))
1329 if first_group else 0)
1331 if first_group else 0)
1330 second_group_parts = (
1332 second_group_parts = (
1331 len(second_group.users_group_name.split('/'))
1333 len(second_group.users_group_name.split('/'))
1332 if second_group else 0)
1334 if second_group else 0)
1333 return cmp(second_group_parts, first_group_parts)
1335 return cmp(second_group_parts, first_group_parts)
1334
1336
1335 sorted_user_group_ids = sorted(
1337 sorted_user_group_ids = sorted(
1336 self.user_group_ids, cmp=_user_group_compare)
1338 self.user_group_ids, cmp=_user_group_compare)
1337 for user_group_id in sorted_user_group_ids:
1339 for user_group_id in sorted_user_group_ids:
1338 self.fixture.destroy_user_group(user_group_id)
1340 self.fixture.destroy_user_group(user_group_id)
1339
1341
1340 def _cleanup_users(self):
1342 def _cleanup_users(self):
1341 for user_id in self.user_ids:
1343 for user_id in self.user_ids:
1342 self.fixture.destroy_user(user_id)
1344 self.fixture.destroy_user(user_id)
1343
1345
1344
1346
1345 # TODO: Think about moving this into a pytest-pyro package and make it a
1347 # TODO: Think about moving this into a pytest-pyro package and make it a
1346 # pytest plugin
1348 # pytest plugin
1347 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1349 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1348 def pytest_runtest_makereport(item, call):
1350 def pytest_runtest_makereport(item, call):
1349 """
1351 """
1350 Adding the remote traceback if the exception has this information.
1352 Adding the remote traceback if the exception has this information.
1351
1353
1352 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1354 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1353 to the exception instance.
1355 to the exception instance.
1354 """
1356 """
1355 outcome = yield
1357 outcome = yield
1356 report = outcome.get_result()
1358 report = outcome.get_result()
1357 if call.excinfo:
1359 if call.excinfo:
1358 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1360 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1359
1361
1360
1362
1361 def _add_vcsserver_remote_traceback(report, exc):
1363 def _add_vcsserver_remote_traceback(report, exc):
1362 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1364 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1363
1365
1364 if vcsserver_traceback:
1366 if vcsserver_traceback:
1365 section = 'VCSServer remote traceback ' + report.when
1367 section = 'VCSServer remote traceback ' + report.when
1366 report.sections.append((section, vcsserver_traceback))
1368 report.sections.append((section, vcsserver_traceback))
1367
1369
1368
1370
1369 @pytest.fixture(scope='session')
1371 @pytest.fixture(scope='session')
1370 def testrun():
1372 def testrun():
1371 return {
1373 return {
1372 'uuid': uuid.uuid4(),
1374 'uuid': uuid.uuid4(),
1373 'start': datetime.datetime.utcnow().isoformat(),
1375 'start': datetime.datetime.utcnow().isoformat(),
1374 'timestamp': int(time.time()),
1376 'timestamp': int(time.time()),
1375 }
1377 }
1376
1378
1377
1379
1378 @pytest.fixture(autouse=True)
1380 @pytest.fixture(autouse=True)
1379 def collect_appenlight_stats(request, testrun):
1381 def collect_appenlight_stats(request, testrun):
1380 """
1382 """
1381 This fixture reports memory consumtion of single tests.
1383 This fixture reports memory consumtion of single tests.
1382
1384
1383 It gathers data based on `psutil` and sends them to Appenlight. The option
1385 It gathers data based on `psutil` and sends them to Appenlight. The option
1384 ``--ae`` has te be used to enable this fixture and the API key for your
1386 ``--ae`` has te be used to enable this fixture and the API key for your
1385 application has to be provided in ``--ae-key``.
1387 application has to be provided in ``--ae-key``.
1386 """
1388 """
1387 try:
1389 try:
1388 # cygwin cannot have yet psutil support.
1390 # cygwin cannot have yet psutil support.
1389 import psutil
1391 import psutil
1390 except ImportError:
1392 except ImportError:
1391 return
1393 return
1392
1394
1393 if not request.config.getoption('--appenlight'):
1395 if not request.config.getoption('--appenlight'):
1394 return
1396 return
1395 else:
1397 else:
1396 # Only request the baseapp fixture if appenlight tracking is
1398 # Only request the baseapp fixture if appenlight tracking is
1397 # enabled. This will speed up a test run of unit tests by 2 to 3
1399 # enabled. This will speed up a test run of unit tests by 2 to 3
1398 # seconds if appenlight is not enabled.
1400 # seconds if appenlight is not enabled.
1399 baseapp = request.getfuncargvalue("baseapp")
1401 baseapp = request.getfuncargvalue("baseapp")
1400 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1402 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1401 client = AppenlightClient(
1403 client = AppenlightClient(
1402 url=url,
1404 url=url,
1403 api_key=request.config.getoption('--appenlight-api-key'),
1405 api_key=request.config.getoption('--appenlight-api-key'),
1404 namespace=request.node.nodeid,
1406 namespace=request.node.nodeid,
1405 request=str(testrun['uuid']),
1407 request=str(testrun['uuid']),
1406 testrun=testrun)
1408 testrun=testrun)
1407
1409
1408 client.collect({
1410 client.collect({
1409 'message': "Starting",
1411 'message': "Starting",
1410 })
1412 })
1411
1413
1412 server_and_port = baseapp.config.get_settings()['vcs.server']
1414 server_and_port = baseapp.config.get_settings()['vcs.server']
1413 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1415 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1414 server = create_vcsserver_proxy(server_and_port, protocol)
1416 server = create_vcsserver_proxy(server_and_port, protocol)
1415 with server:
1417 with server:
1416 vcs_pid = server.get_pid()
1418 vcs_pid = server.get_pid()
1417 server.run_gc()
1419 server.run_gc()
1418 vcs_process = psutil.Process(vcs_pid)
1420 vcs_process = psutil.Process(vcs_pid)
1419 mem = vcs_process.memory_info()
1421 mem = vcs_process.memory_info()
1420 client.tag_before('vcsserver.rss', mem.rss)
1422 client.tag_before('vcsserver.rss', mem.rss)
1421 client.tag_before('vcsserver.vms', mem.vms)
1423 client.tag_before('vcsserver.vms', mem.vms)
1422
1424
1423 test_process = psutil.Process()
1425 test_process = psutil.Process()
1424 mem = test_process.memory_info()
1426 mem = test_process.memory_info()
1425 client.tag_before('test.rss', mem.rss)
1427 client.tag_before('test.rss', mem.rss)
1426 client.tag_before('test.vms', mem.vms)
1428 client.tag_before('test.vms', mem.vms)
1427
1429
1428 client.tag_before('time', time.time())
1430 client.tag_before('time', time.time())
1429
1431
1430 @request.addfinalizer
1432 @request.addfinalizer
1431 def send_stats():
1433 def send_stats():
1432 client.tag_after('time', time.time())
1434 client.tag_after('time', time.time())
1433 with server:
1435 with server:
1434 gc_stats = server.run_gc()
1436 gc_stats = server.run_gc()
1435 for tag, value in gc_stats.items():
1437 for tag, value in gc_stats.items():
1436 client.tag_after(tag, value)
1438 client.tag_after(tag, value)
1437 mem = vcs_process.memory_info()
1439 mem = vcs_process.memory_info()
1438 client.tag_after('vcsserver.rss', mem.rss)
1440 client.tag_after('vcsserver.rss', mem.rss)
1439 client.tag_after('vcsserver.vms', mem.vms)
1441 client.tag_after('vcsserver.vms', mem.vms)
1440
1442
1441 mem = test_process.memory_info()
1443 mem = test_process.memory_info()
1442 client.tag_after('test.rss', mem.rss)
1444 client.tag_after('test.rss', mem.rss)
1443 client.tag_after('test.vms', mem.vms)
1445 client.tag_after('test.vms', mem.vms)
1444
1446
1445 client.collect({
1447 client.collect({
1446 'message': "Finished",
1448 'message': "Finished",
1447 })
1449 })
1448 client.send_stats()
1450 client.send_stats()
1449
1451
1450 return client
1452 return client
1451
1453
1452
1454
1453 class AppenlightClient():
1455 class AppenlightClient():
1454
1456
1455 url_template = '{url}?protocol_version=0.5'
1457 url_template = '{url}?protocol_version=0.5'
1456
1458
1457 def __init__(
1459 def __init__(
1458 self, url, api_key, add_server=True, add_timestamp=True,
1460 self, url, api_key, add_server=True, add_timestamp=True,
1459 namespace=None, request=None, testrun=None):
1461 namespace=None, request=None, testrun=None):
1460 self.url = self.url_template.format(url=url)
1462 self.url = self.url_template.format(url=url)
1461 self.api_key = api_key
1463 self.api_key = api_key
1462 self.add_server = add_server
1464 self.add_server = add_server
1463 self.add_timestamp = add_timestamp
1465 self.add_timestamp = add_timestamp
1464 self.namespace = namespace
1466 self.namespace = namespace
1465 self.request = request
1467 self.request = request
1466 self.server = socket.getfqdn(socket.gethostname())
1468 self.server = socket.getfqdn(socket.gethostname())
1467 self.tags_before = {}
1469 self.tags_before = {}
1468 self.tags_after = {}
1470 self.tags_after = {}
1469 self.stats = []
1471 self.stats = []
1470 self.testrun = testrun or {}
1472 self.testrun = testrun or {}
1471
1473
1472 def tag_before(self, tag, value):
1474 def tag_before(self, tag, value):
1473 self.tags_before[tag] = value
1475 self.tags_before[tag] = value
1474
1476
1475 def tag_after(self, tag, value):
1477 def tag_after(self, tag, value):
1476 self.tags_after[tag] = value
1478 self.tags_after[tag] = value
1477
1479
1478 def collect(self, data):
1480 def collect(self, data):
1479 if self.add_server:
1481 if self.add_server:
1480 data.setdefault('server', self.server)
1482 data.setdefault('server', self.server)
1481 if self.add_timestamp:
1483 if self.add_timestamp:
1482 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1484 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1483 if self.namespace:
1485 if self.namespace:
1484 data.setdefault('namespace', self.namespace)
1486 data.setdefault('namespace', self.namespace)
1485 if self.request:
1487 if self.request:
1486 data.setdefault('request', self.request)
1488 data.setdefault('request', self.request)
1487 self.stats.append(data)
1489 self.stats.append(data)
1488
1490
1489 def send_stats(self):
1491 def send_stats(self):
1490 tags = [
1492 tags = [
1491 ('testrun', self.request),
1493 ('testrun', self.request),
1492 ('testrun.start', self.testrun['start']),
1494 ('testrun.start', self.testrun['start']),
1493 ('testrun.timestamp', self.testrun['timestamp']),
1495 ('testrun.timestamp', self.testrun['timestamp']),
1494 ('test', self.namespace),
1496 ('test', self.namespace),
1495 ]
1497 ]
1496 for key, value in self.tags_before.items():
1498 for key, value in self.tags_before.items():
1497 tags.append((key + '.before', value))
1499 tags.append((key + '.before', value))
1498 try:
1500 try:
1499 delta = self.tags_after[key] - value
1501 delta = self.tags_after[key] - value
1500 tags.append((key + '.delta', delta))
1502 tags.append((key + '.delta', delta))
1501 except Exception:
1503 except Exception:
1502 pass
1504 pass
1503 for key, value in self.tags_after.items():
1505 for key, value in self.tags_after.items():
1504 tags.append((key + '.after', value))
1506 tags.append((key + '.after', value))
1505 self.collect({
1507 self.collect({
1506 'message': "Collected tags",
1508 'message': "Collected tags",
1507 'tags': tags,
1509 'tags': tags,
1508 })
1510 })
1509
1511
1510 response = requests.post(
1512 response = requests.post(
1511 self.url,
1513 self.url,
1512 headers={
1514 headers={
1513 'X-appenlight-api-key': self.api_key},
1515 'X-appenlight-api-key': self.api_key},
1514 json=self.stats,
1516 json=self.stats,
1515 )
1517 )
1516
1518
1517 if not response.status_code == 200:
1519 if not response.status_code == 200:
1518 pprint.pprint(self.stats)
1520 pprint.pprint(self.stats)
1519 print(response.headers)
1521 print(response.headers)
1520 print(response.text)
1522 print(response.text)
1521 raise Exception('Sending to appenlight failed')
1523 raise Exception('Sending to appenlight failed')
1522
1524
1523
1525
1524 @pytest.fixture
1526 @pytest.fixture
1525 def gist_util(request, db_connection):
1527 def gist_util(request, db_connection):
1526 """
1528 """
1527 Provides a wired instance of `GistUtility` with integrated cleanup.
1529 Provides a wired instance of `GistUtility` with integrated cleanup.
1528 """
1530 """
1529 utility = GistUtility()
1531 utility = GistUtility()
1530 request.addfinalizer(utility.cleanup)
1532 request.addfinalizer(utility.cleanup)
1531 return utility
1533 return utility
1532
1534
1533
1535
1534 class GistUtility(object):
1536 class GistUtility(object):
1535 def __init__(self):
1537 def __init__(self):
1536 self.fixture = Fixture()
1538 self.fixture = Fixture()
1537 self.gist_ids = []
1539 self.gist_ids = []
1538
1540
1539 def create_gist(self, **kwargs):
1541 def create_gist(self, **kwargs):
1540 gist = self.fixture.create_gist(**kwargs)
1542 gist = self.fixture.create_gist(**kwargs)
1541 self.gist_ids.append(gist.gist_id)
1543 self.gist_ids.append(gist.gist_id)
1542 return gist
1544 return gist
1543
1545
1544 def cleanup(self):
1546 def cleanup(self):
1545 for id_ in self.gist_ids:
1547 for id_ in self.gist_ids:
1546 self.fixture.destroy_gists(str(id_))
1548 self.fixture.destroy_gists(str(id_))
1547
1549
1548
1550
1549 @pytest.fixture
1551 @pytest.fixture
1550 def enabled_backends(request):
1552 def enabled_backends(request):
1551 backends = request.config.option.backends
1553 backends = request.config.option.backends
1552 return backends[:]
1554 return backends[:]
1553
1555
1554
1556
1555 @pytest.fixture
1557 @pytest.fixture
1556 def settings_util(request, db_connection):
1558 def settings_util(request, db_connection):
1557 """
1559 """
1558 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1560 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1559 """
1561 """
1560 utility = SettingsUtility()
1562 utility = SettingsUtility()
1561 request.addfinalizer(utility.cleanup)
1563 request.addfinalizer(utility.cleanup)
1562 return utility
1564 return utility
1563
1565
1564
1566
1565 class SettingsUtility(object):
1567 class SettingsUtility(object):
1566 def __init__(self):
1568 def __init__(self):
1567 self.rhodecode_ui_ids = []
1569 self.rhodecode_ui_ids = []
1568 self.rhodecode_setting_ids = []
1570 self.rhodecode_setting_ids = []
1569 self.repo_rhodecode_ui_ids = []
1571 self.repo_rhodecode_ui_ids = []
1570 self.repo_rhodecode_setting_ids = []
1572 self.repo_rhodecode_setting_ids = []
1571
1573
1572 def create_repo_rhodecode_ui(
1574 def create_repo_rhodecode_ui(
1573 self, repo, section, value, key=None, active=True, cleanup=True):
1575 self, repo, section, value, key=None, active=True, cleanup=True):
1574 key = key or hashlib.sha1(
1576 key = key or hashlib.sha1(
1575 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1577 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1576
1578
1577 setting = RepoRhodeCodeUi()
1579 setting = RepoRhodeCodeUi()
1578 setting.repository_id = repo.repo_id
1580 setting.repository_id = repo.repo_id
1579 setting.ui_section = section
1581 setting.ui_section = section
1580 setting.ui_value = value
1582 setting.ui_value = value
1581 setting.ui_key = key
1583 setting.ui_key = key
1582 setting.ui_active = active
1584 setting.ui_active = active
1583 Session().add(setting)
1585 Session().add(setting)
1584 Session().commit()
1586 Session().commit()
1585
1587
1586 if cleanup:
1588 if cleanup:
1587 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1589 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1588 return setting
1590 return setting
1589
1591
1590 def create_rhodecode_ui(
1592 def create_rhodecode_ui(
1591 self, section, value, key=None, active=True, cleanup=True):
1593 self, section, value, key=None, active=True, cleanup=True):
1592 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1594 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1593
1595
1594 setting = RhodeCodeUi()
1596 setting = RhodeCodeUi()
1595 setting.ui_section = section
1597 setting.ui_section = section
1596 setting.ui_value = value
1598 setting.ui_value = value
1597 setting.ui_key = key
1599 setting.ui_key = key
1598 setting.ui_active = active
1600 setting.ui_active = active
1599 Session().add(setting)
1601 Session().add(setting)
1600 Session().commit()
1602 Session().commit()
1601
1603
1602 if cleanup:
1604 if cleanup:
1603 self.rhodecode_ui_ids.append(setting.ui_id)
1605 self.rhodecode_ui_ids.append(setting.ui_id)
1604 return setting
1606 return setting
1605
1607
1606 def create_repo_rhodecode_setting(
1608 def create_repo_rhodecode_setting(
1607 self, repo, name, value, type_, cleanup=True):
1609 self, repo, name, value, type_, cleanup=True):
1608 setting = RepoRhodeCodeSetting(
1610 setting = RepoRhodeCodeSetting(
1609 repo.repo_id, key=name, val=value, type=type_)
1611 repo.repo_id, key=name, val=value, type=type_)
1610 Session().add(setting)
1612 Session().add(setting)
1611 Session().commit()
1613 Session().commit()
1612
1614
1613 if cleanup:
1615 if cleanup:
1614 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1616 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1615 return setting
1617 return setting
1616
1618
1617 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1619 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1618 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1620 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1619 Session().add(setting)
1621 Session().add(setting)
1620 Session().commit()
1622 Session().commit()
1621
1623
1622 if cleanup:
1624 if cleanup:
1623 self.rhodecode_setting_ids.append(setting.app_settings_id)
1625 self.rhodecode_setting_ids.append(setting.app_settings_id)
1624
1626
1625 return setting
1627 return setting
1626
1628
1627 def cleanup(self):
1629 def cleanup(self):
1628 for id_ in self.rhodecode_ui_ids:
1630 for id_ in self.rhodecode_ui_ids:
1629 setting = RhodeCodeUi.get(id_)
1631 setting = RhodeCodeUi.get(id_)
1630 Session().delete(setting)
1632 Session().delete(setting)
1631
1633
1632 for id_ in self.rhodecode_setting_ids:
1634 for id_ in self.rhodecode_setting_ids:
1633 setting = RhodeCodeSetting.get(id_)
1635 setting = RhodeCodeSetting.get(id_)
1634 Session().delete(setting)
1636 Session().delete(setting)
1635
1637
1636 for id_ in self.repo_rhodecode_ui_ids:
1638 for id_ in self.repo_rhodecode_ui_ids:
1637 setting = RepoRhodeCodeUi.get(id_)
1639 setting = RepoRhodeCodeUi.get(id_)
1638 Session().delete(setting)
1640 Session().delete(setting)
1639
1641
1640 for id_ in self.repo_rhodecode_setting_ids:
1642 for id_ in self.repo_rhodecode_setting_ids:
1641 setting = RepoRhodeCodeSetting.get(id_)
1643 setting = RepoRhodeCodeSetting.get(id_)
1642 Session().delete(setting)
1644 Session().delete(setting)
1643
1645
1644 Session().commit()
1646 Session().commit()
1645
1647
1646
1648
1647 @pytest.fixture
1649 @pytest.fixture
1648 def no_notifications(request):
1650 def no_notifications(request):
1649 notification_patcher = mock.patch(
1651 notification_patcher = mock.patch(
1650 'rhodecode.model.notification.NotificationModel.create')
1652 'rhodecode.model.notification.NotificationModel.create')
1651 notification_patcher.start()
1653 notification_patcher.start()
1652 request.addfinalizer(notification_patcher.stop)
1654 request.addfinalizer(notification_patcher.stop)
1653
1655
1654
1656
1655 @pytest.fixture(scope='session')
1657 @pytest.fixture(scope='session')
1656 def repeat(request):
1658 def repeat(request):
1657 """
1659 """
1658 The number of repetitions is based on this fixture.
1660 The number of repetitions is based on this fixture.
1659
1661
1660 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1662 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1661 tests are not too slow in our default test suite.
1663 tests are not too slow in our default test suite.
1662 """
1664 """
1663 return request.config.getoption('--repeat')
1665 return request.config.getoption('--repeat')
1664
1666
1665
1667
1666 @pytest.fixture
1668 @pytest.fixture
1667 def rhodecode_fixtures():
1669 def rhodecode_fixtures():
1668 return Fixture()
1670 return Fixture()
1669
1671
1670
1672
1671 @pytest.fixture
1673 @pytest.fixture
1672 def context_stub():
1674 def context_stub():
1673 """
1675 """
1674 Stub context object.
1676 Stub context object.
1675 """
1677 """
1676 context = pyramid.testing.DummyResource()
1678 context = pyramid.testing.DummyResource()
1677 return context
1679 return context
1678
1680
1679
1681
1680 @pytest.fixture
1682 @pytest.fixture
1681 def request_stub():
1683 def request_stub():
1682 """
1684 """
1683 Stub request object.
1685 Stub request object.
1684 """
1686 """
1685 from rhodecode.lib.base import bootstrap_request
1687 from rhodecode.lib.base import bootstrap_request
1686 request = bootstrap_request(scheme='https')
1688 request = bootstrap_request(scheme='https')
1687 return request
1689 return request
1688
1690
1689
1691
1690 @pytest.fixture
1692 @pytest.fixture
1691 def config_stub(request, request_stub):
1693 def config_stub(request, request_stub):
1692 """
1694 """
1693 Set up pyramid.testing and return the Configurator.
1695 Set up pyramid.testing and return the Configurator.
1694 """
1696 """
1695 from rhodecode.lib.base import bootstrap_config
1697 from rhodecode.lib.base import bootstrap_config
1696 config = bootstrap_config(request=request_stub)
1698 config = bootstrap_config(request=request_stub)
1697
1699
1698 @request.addfinalizer
1700 @request.addfinalizer
1699 def cleanup():
1701 def cleanup():
1700 pyramid.testing.tearDown()
1702 pyramid.testing.tearDown()
1701
1703
1702 return config
1704 return config
1703
1705
1704
1706
1705 @pytest.fixture
1707 @pytest.fixture
1706 def StubIntegrationType():
1708 def StubIntegrationType():
1707 class _StubIntegrationType(IntegrationTypeBase):
1709 class _StubIntegrationType(IntegrationTypeBase):
1708 """ Test integration type class """
1710 """ Test integration type class """
1709
1711
1710 key = 'test'
1712 key = 'test'
1711 display_name = 'Test integration type'
1713 display_name = 'Test integration type'
1712 description = 'A test integration type for testing'
1714 description = 'A test integration type for testing'
1713
1715
1714 @classmethod
1716 @classmethod
1715 def icon(cls):
1717 def icon(cls):
1716 return 'test_icon_html_image'
1718 return 'test_icon_html_image'
1717
1719
1718 def __init__(self, settings):
1720 def __init__(self, settings):
1719 super(_StubIntegrationType, self).__init__(settings)
1721 super(_StubIntegrationType, self).__init__(settings)
1720 self.sent_events = [] # for testing
1722 self.sent_events = [] # for testing
1721
1723
1722 def send_event(self, event):
1724 def send_event(self, event):
1723 self.sent_events.append(event)
1725 self.sent_events.append(event)
1724
1726
1725 def settings_schema(self):
1727 def settings_schema(self):
1726 class SettingsSchema(colander.Schema):
1728 class SettingsSchema(colander.Schema):
1727 test_string_field = colander.SchemaNode(
1729 test_string_field = colander.SchemaNode(
1728 colander.String(),
1730 colander.String(),
1729 missing=colander.required,
1731 missing=colander.required,
1730 title='test string field',
1732 title='test string field',
1731 )
1733 )
1732 test_int_field = colander.SchemaNode(
1734 test_int_field = colander.SchemaNode(
1733 colander.Int(),
1735 colander.Int(),
1734 title='some integer setting',
1736 title='some integer setting',
1735 )
1737 )
1736 return SettingsSchema()
1738 return SettingsSchema()
1737
1739
1738
1740
1739 integration_type_registry.register_integration_type(_StubIntegrationType)
1741 integration_type_registry.register_integration_type(_StubIntegrationType)
1740 return _StubIntegrationType
1742 return _StubIntegrationType
1741
1743
1742 @pytest.fixture
1744 @pytest.fixture
1743 def stub_integration_settings():
1745 def stub_integration_settings():
1744 return {
1746 return {
1745 'test_string_field': 'some data',
1747 'test_string_field': 'some data',
1746 'test_int_field': 100,
1748 'test_int_field': 100,
1747 }
1749 }
1748
1750
1749
1751
1750 @pytest.fixture
1752 @pytest.fixture
1751 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1753 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1752 stub_integration_settings):
1754 stub_integration_settings):
1753 integration = IntegrationModel().create(
1755 integration = IntegrationModel().create(
1754 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1756 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1755 name='test repo integration',
1757 name='test repo integration',
1756 repo=repo_stub, repo_group=None, child_repos_only=None)
1758 repo=repo_stub, repo_group=None, child_repos_only=None)
1757
1759
1758 @request.addfinalizer
1760 @request.addfinalizer
1759 def cleanup():
1761 def cleanup():
1760 IntegrationModel().delete(integration)
1762 IntegrationModel().delete(integration)
1761
1763
1762 return integration
1764 return integration
1763
1765
1764
1766
1765 @pytest.fixture
1767 @pytest.fixture
1766 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1768 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1767 stub_integration_settings):
1769 stub_integration_settings):
1768 integration = IntegrationModel().create(
1770 integration = IntegrationModel().create(
1769 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1771 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1770 name='test repogroup integration',
1772 name='test repogroup integration',
1771 repo=None, repo_group=test_repo_group, child_repos_only=True)
1773 repo=None, repo_group=test_repo_group, child_repos_only=True)
1772
1774
1773 @request.addfinalizer
1775 @request.addfinalizer
1774 def cleanup():
1776 def cleanup():
1775 IntegrationModel().delete(integration)
1777 IntegrationModel().delete(integration)
1776
1778
1777 return integration
1779 return integration
1778
1780
1779
1781
1780 @pytest.fixture
1782 @pytest.fixture
1781 def repogroup_recursive_integration_stub(request, test_repo_group,
1783 def repogroup_recursive_integration_stub(request, test_repo_group,
1782 StubIntegrationType, stub_integration_settings):
1784 StubIntegrationType, stub_integration_settings):
1783 integration = IntegrationModel().create(
1785 integration = IntegrationModel().create(
1784 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1786 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1785 name='test recursive repogroup integration',
1787 name='test recursive repogroup integration',
1786 repo=None, repo_group=test_repo_group, child_repos_only=False)
1788 repo=None, repo_group=test_repo_group, child_repos_only=False)
1787
1789
1788 @request.addfinalizer
1790 @request.addfinalizer
1789 def cleanup():
1791 def cleanup():
1790 IntegrationModel().delete(integration)
1792 IntegrationModel().delete(integration)
1791
1793
1792 return integration
1794 return integration
1793
1795
1794
1796
1795 @pytest.fixture
1797 @pytest.fixture
1796 def global_integration_stub(request, StubIntegrationType,
1798 def global_integration_stub(request, StubIntegrationType,
1797 stub_integration_settings):
1799 stub_integration_settings):
1798 integration = IntegrationModel().create(
1800 integration = IntegrationModel().create(
1799 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1801 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1800 name='test global integration',
1802 name='test global integration',
1801 repo=None, repo_group=None, child_repos_only=None)
1803 repo=None, repo_group=None, child_repos_only=None)
1802
1804
1803 @request.addfinalizer
1805 @request.addfinalizer
1804 def cleanup():
1806 def cleanup():
1805 IntegrationModel().delete(integration)
1807 IntegrationModel().delete(integration)
1806
1808
1807 return integration
1809 return integration
1808
1810
1809
1811
1810 @pytest.fixture
1812 @pytest.fixture
1811 def root_repos_integration_stub(request, StubIntegrationType,
1813 def root_repos_integration_stub(request, StubIntegrationType,
1812 stub_integration_settings):
1814 stub_integration_settings):
1813 integration = IntegrationModel().create(
1815 integration = IntegrationModel().create(
1814 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1816 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1815 name='test global integration',
1817 name='test global integration',
1816 repo=None, repo_group=None, child_repos_only=True)
1818 repo=None, repo_group=None, child_repos_only=True)
1817
1819
1818 @request.addfinalizer
1820 @request.addfinalizer
1819 def cleanup():
1821 def cleanup():
1820 IntegrationModel().delete(integration)
1822 IntegrationModel().delete(integration)
1821
1823
1822 return integration
1824 return integration
1823
1825
1824
1826
1825 @pytest.fixture
1827 @pytest.fixture
1826 def local_dt_to_utc():
1828 def local_dt_to_utc():
1827 def _factory(dt):
1829 def _factory(dt):
1828 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1830 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1829 dateutil.tz.tzutc()).replace(tzinfo=None)
1831 dateutil.tz.tzutc()).replace(tzinfo=None)
1830 return _factory
1832 return _factory
1831
1833
1832
1834
1833 @pytest.fixture
1835 @pytest.fixture
1834 def disable_anonymous_user(request, baseapp):
1836 def disable_anonymous_user(request, baseapp):
1835 set_anonymous_access(False)
1837 set_anonymous_access(False)
1836
1838
1837 @request.addfinalizer
1839 @request.addfinalizer
1838 def cleanup():
1840 def cleanup():
1839 set_anonymous_access(True)
1841 set_anonymous_access(True)
1840
1842
1841
1843
1842 @pytest.fixture(scope='module')
1844 @pytest.fixture(scope='module')
1843 def rc_fixture(request):
1845 def rc_fixture(request):
1844 return Fixture()
1846 return Fixture()
1845
1847
1846
1848
1847 @pytest.fixture
1849 @pytest.fixture
1848 def repo_groups(request):
1850 def repo_groups(request):
1849 fixture = Fixture()
1851 fixture = Fixture()
1850
1852
1851 session = Session()
1853 session = Session()
1852 zombie_group = fixture.create_repo_group('zombie')
1854 zombie_group = fixture.create_repo_group('zombie')
1853 parent_group = fixture.create_repo_group('parent')
1855 parent_group = fixture.create_repo_group('parent')
1854 child_group = fixture.create_repo_group('parent/child')
1856 child_group = fixture.create_repo_group('parent/child')
1855 groups_in_db = session.query(RepoGroup).all()
1857 groups_in_db = session.query(RepoGroup).all()
1856 assert len(groups_in_db) == 3
1858 assert len(groups_in_db) == 3
1857 assert child_group.group_parent_id == parent_group.group_id
1859 assert child_group.group_parent_id == parent_group.group_id
1858
1860
1859 @request.addfinalizer
1861 @request.addfinalizer
1860 def cleanup():
1862 def cleanup():
1861 fixture.destroy_repo_group(zombie_group)
1863 fixture.destroy_repo_group(zombie_group)
1862 fixture.destroy_repo_group(child_group)
1864 fixture.destroy_repo_group(child_group)
1863 fixture.destroy_repo_group(parent_group)
1865 fixture.destroy_repo_group(parent_group)
1864
1866
1865 return zombie_group, parent_group, child_group
1867 return zombie_group, parent_group, child_group
@@ -1,1289 +1,1288 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 def repo_path_generator():
44 def repo_path_generator():
45 """
45 """
46 Return a different path to be used for cloning repos.
46 Return a different path to be used for cloning repos.
47 """
47 """
48 i = 0
48 i = 0
49 while True:
49 while True:
50 i += 1
50 i += 1
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52
52
53
53
54 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
55
55
56
56
57 class TestGitRepository:
57 class TestGitRepository:
58
58
59 # pylint: disable=protected-access
59 # pylint: disable=protected-access
60
60
61 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
64 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
65 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
66
66
67 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
68 def prepare(self, request, baseapp):
68 def prepare(self, request, baseapp):
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70
70
71 def get_clone_repo(self):
71 def get_clone_repo(self):
72 """
72 """
73 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
74 """
74 """
75 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
78
78
79 return repo_clone
79 return repo_clone
80
80
81 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
82 """
82 """
83 Return a non bare empty repo.
83 Return a non bare empty repo.
84 """
84 """
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86
86
87 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
88 wrong_repo_path = '/tmp/errorrepo_git'
88 wrong_repo_path = '/tmp/errorrepo_git'
89 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
90 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
91
91
92 def test_repo_clone(self):
92 def test_repo_clone(self):
93 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
94 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
97 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
100 for commit in repo.get_commits():
100 for commit in repo.get_commits():
101 raw_id = commit.raw_id
101 raw_id = commit.raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103
103
104 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
105 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
106 GitRepository(
106 GitRepository(
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108
108
109 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
110 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 repo_clone = GitRepository(
112 repo_clone = GitRepository(
113 clone_path,
113 clone_path,
114 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116
116
117 # check if current workdir was updated
117 # check if current workdir was updated
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
120
120
121 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
122 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 repo_clone = GitRepository(
124 repo_clone = GitRepository(
125 clone_path,
125 clone_path,
126 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 # Make sure it's not bare repo
130 # Make sure it's not bare repo
131 assert not repo_clone.bare
131 assert not repo_clone.bare
132 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
133
133
134 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
135 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 repo_clone = GitRepository(
137 repo_clone = GitRepository(
138 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
139 assert repo_clone.bare
139 assert repo_clone.bare
140
140
141 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 assert not repo.bare
143 assert not repo.bare
144
144
145 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 assert repo.bare
147 assert repo.bare
148
148
149 def test_update_server_info(self):
149 def test_update_server_info(self):
150 self.repo._update_server_info()
150 self.repo._update_server_info()
151
151
152 def test_fetch(self, vcsbackend_git):
152 def test_fetch(self, vcsbackend_git):
153 # Note: This is a git specific part of the API, it's only implemented
153 # Note: This is a git specific part of the API, it's only implemented
154 # by the git backend.
154 # by the git backend.
155 source_repo = vcsbackend_git.repo
155 source_repo = vcsbackend_git.repo
156 target_repo = vcsbackend_git.create_repo()
156 target_repo = vcsbackend_git.create_repo(bare=True)
157 target_repo.fetch(source_repo.path)
157 target_repo.fetch(source_repo.path)
158 # Note: Get a fresh instance, avoids caching trouble
158 # Note: Get a fresh instance, avoids caching trouble
159 target_repo = vcsbackend_git.backend(target_repo.path)
159 target_repo = vcsbackend_git.backend(target_repo.path)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161
161
162 def test_commit_ids(self):
162 def test_commit_ids(self):
163 # there are 112 commits (by now)
163 # there are 112 commits (by now)
164 # so we can assume they would be available from now on
164 # so we can assume they would be available from now on
165 subset = set([
165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 '2d1028c054665b962fa3d307adfc923ddd528038',
171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 '8430a588b43b5d6da365400117c89400326e7992',
175 '8430a588b43b5d6da365400117c89400326e7992',
175 'd955cd312c17b02143c04fa1099a352b04368118',
176 'd955cd312c17b02143c04fa1099a352b04368118',
176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
191 assert subset.issubset(set(self.repo.commit_ids))
190 assert subset.issubset(set(self.repo.commit_ids))
192
191
193 def test_slicing(self):
192 def test_slicing(self):
194 # 4 1 5 10 95
193 # 4 1 5 10 95
195 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
196 (10, 20, 10), (5, 100, 95)]:
195 (10, 20, 10), (5, 100, 95)]:
197 commit_ids = list(self.repo[sfrom:sto])
196 commit_ids = list(self.repo[sfrom:sto])
198 assert len(commit_ids) == size
197 assert len(commit_ids) == size
199 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
200 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
201
200
202 def test_branches(self):
201 def test_branches(self):
203 # TODO: Need more tests here
202 # TODO: Need more tests here
204 # Removed (those are 'remotes' branches for cloned repo)
203 # Removed (those are 'remotes' branches for cloned repo)
205 # assert 'master' in self.repo.branches
204 # assert 'master' in self.repo.branches
206 # assert 'gittree' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
207 # assert 'web-branch' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
208 for __, commit_id in self.repo.branches.items():
207 for __, commit_id in self.repo.branches.items():
209 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
210
209
211 def test_tags(self):
210 def test_tags(self):
212 # TODO: Need more tests here
211 # TODO: Need more tests here
213 assert 'v0.1.1' in self.repo.tags
212 assert 'v0.1.1' in self.repo.tags
214 assert 'v0.1.2' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
215 for __, commit_id in self.repo.tags.items():
214 for __, commit_id in self.repo.tags.items():
216 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
217
216
218 def _test_single_commit_cache(self, commit_id):
217 def _test_single_commit_cache(self, commit_id):
219 commit = self.repo.get_commit(commit_id)
218 commit = self.repo.get_commit(commit_id)
220 assert commit_id in self.repo.commits
219 assert commit_id in self.repo.commits
221 assert commit is self.repo.commits[commit_id]
220 assert commit is self.repo.commits[commit_id]
222
221
223 def test_initial_commit(self):
222 def test_initial_commit(self):
224 commit_id = self.repo.commit_ids[0]
223 commit_id = self.repo.commit_ids[0]
225 init_commit = self.repo.get_commit(commit_id)
224 init_commit = self.repo.get_commit(commit_id)
226 init_author = init_commit.author
225 init_author = init_commit.author
227
226
228 assert init_commit.message == 'initial import\n'
227 assert init_commit.message == 'initial import\n'
229 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
230 assert init_author == init_commit.committer
229 assert init_author == init_commit.committer
231 for path in ('vcs/__init__.py',
230 for path in ('vcs/__init__.py',
232 'vcs/backends/BaseRepository.py',
231 'vcs/backends/BaseRepository.py',
233 'vcs/backends/__init__.py'):
232 'vcs/backends/__init__.py'):
234 assert isinstance(init_commit.get_node(path), FileNode)
233 assert isinstance(init_commit.get_node(path), FileNode)
235 for path in ('', 'vcs', 'vcs/backends'):
234 for path in ('', 'vcs', 'vcs/backends'):
236 assert isinstance(init_commit.get_node(path), DirNode)
235 assert isinstance(init_commit.get_node(path), DirNode)
237
236
238 with pytest.raises(NodeDoesNotExistError):
237 with pytest.raises(NodeDoesNotExistError):
239 init_commit.get_node(path='foobar')
238 init_commit.get_node(path='foobar')
240
239
241 node = init_commit.get_node('vcs/')
240 node = init_commit.get_node('vcs/')
242 assert hasattr(node, 'kind')
241 assert hasattr(node, 'kind')
243 assert node.kind == NodeKind.DIR
242 assert node.kind == NodeKind.DIR
244
243
245 node = init_commit.get_node('vcs')
244 node = init_commit.get_node('vcs')
246 assert hasattr(node, 'kind')
245 assert hasattr(node, 'kind')
247 assert node.kind == NodeKind.DIR
246 assert node.kind == NodeKind.DIR
248
247
249 node = init_commit.get_node('vcs/__init__.py')
248 node = init_commit.get_node('vcs/__init__.py')
250 assert hasattr(node, 'kind')
249 assert hasattr(node, 'kind')
251 assert node.kind == NodeKind.FILE
250 assert node.kind == NodeKind.FILE
252
251
253 def test_not_existing_commit(self):
252 def test_not_existing_commit(self):
254 with pytest.raises(RepositoryError):
253 with pytest.raises(RepositoryError):
255 self.repo.get_commit('f' * 40)
254 self.repo.get_commit('f' * 40)
256
255
257 def test_commit10(self):
256 def test_commit10(self):
258
257
259 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
260 README = """===
259 README = """===
261 VCS
260 VCS
262 ===
261 ===
263
262
264 Various Version Control System management abstraction layer for Python.
263 Various Version Control System management abstraction layer for Python.
265
264
266 Introduction
265 Introduction
267 ------------
266 ------------
268
267
269 TODO: To be written...
268 TODO: To be written...
270
269
271 """
270 """
272 node = commit10.get_node('README.rst')
271 node = commit10.get_node('README.rst')
273 assert node.kind == NodeKind.FILE
272 assert node.kind == NodeKind.FILE
274 assert node.content == README
273 assert node.content == README
275
274
276 def test_head(self):
275 def test_head(self):
277 assert self.repo.head == self.repo.get_commit().raw_id
276 assert self.repo.head == self.repo.get_commit().raw_id
278
277
279 def test_checkout_with_create(self):
278 def test_checkout_with_create(self):
280 repo_clone = self.get_clone_repo()
279 repo_clone = self.get_clone_repo()
281
280
282 new_branch = 'new_branch'
281 new_branch = 'new_branch'
283 assert repo_clone._current_branch() == 'master'
282 assert repo_clone._current_branch() == 'master'
284 assert set(repo_clone.branches) == set(('master',))
283 assert set(repo_clone.branches) == {'master'}
285 repo_clone._checkout(new_branch, create=True)
284 repo_clone._checkout(new_branch, create=True)
286
285
287 # Branches is a lazy property so we need to recrete the Repo object.
286 # Branches is a lazy property so we need to recrete the Repo object.
288 repo_clone = GitRepository(repo_clone.path)
287 repo_clone = GitRepository(repo_clone.path)
289 assert set(repo_clone.branches) == set(('master', new_branch))
288 assert set(repo_clone.branches) == {'master', new_branch}
290 assert repo_clone._current_branch() == new_branch
289 assert repo_clone._current_branch() == new_branch
291
290
292 def test_checkout(self):
291 def test_checkout(self):
293 repo_clone = self.get_clone_repo()
292 repo_clone = self.get_clone_repo()
294
293
295 repo_clone._checkout('new_branch', create=True)
294 repo_clone._checkout('new_branch', create=True)
296 repo_clone._checkout('master')
295 repo_clone._checkout('master')
297
296
298 assert repo_clone._current_branch() == 'master'
297 assert repo_clone._current_branch() == 'master'
299
298
300 def test_checkout_same_branch(self):
299 def test_checkout_same_branch(self):
301 repo_clone = self.get_clone_repo()
300 repo_clone = self.get_clone_repo()
302
301
303 repo_clone._checkout('master')
302 repo_clone._checkout('master')
304 assert repo_clone._current_branch() == 'master'
303 assert repo_clone._current_branch() == 'master'
305
304
306 def test_checkout_branch_already_exists(self):
305 def test_checkout_branch_already_exists(self):
307 repo_clone = self.get_clone_repo()
306 repo_clone = self.get_clone_repo()
308
307
309 with pytest.raises(RepositoryError):
308 with pytest.raises(RepositoryError):
310 repo_clone._checkout('master', create=True)
309 repo_clone._checkout('master', create=True)
311
310
312 def test_checkout_bare_repo(self):
311 def test_checkout_bare_repo(self):
313 with pytest.raises(RepositoryError):
312 with pytest.raises(RepositoryError):
314 self.repo._checkout('master')
313 self.repo._checkout('master')
315
314
316 def test_current_branch_bare_repo(self):
315 def test_current_branch_bare_repo(self):
317 with pytest.raises(RepositoryError):
316 with pytest.raises(RepositoryError):
318 self.repo._current_branch()
317 self.repo._current_branch()
319
318
320 def test_current_branch_empty_repo(self):
319 def test_current_branch_empty_repo(self):
321 repo = self.get_empty_repo()
320 repo = self.get_empty_repo()
322 assert repo._current_branch() is None
321 assert repo._current_branch() is None
323
322
324 def test_local_clone(self):
323 def test_local_clone(self):
325 clone_path = next(REPO_PATH_GENERATOR)
324 clone_path = next(REPO_PATH_GENERATOR)
326 self.repo._local_clone(clone_path, 'master')
325 self.repo._local_clone(clone_path, 'master')
327 repo_clone = GitRepository(clone_path)
326 repo_clone = GitRepository(clone_path)
328
327
329 assert self.repo.commit_ids == repo_clone.commit_ids
328 assert self.repo.commit_ids == repo_clone.commit_ids
330
329
331 def test_local_clone_with_specific_branch(self):
330 def test_local_clone_with_specific_branch(self):
332 source_repo = self.get_clone_repo()
331 source_repo = self.get_clone_repo()
333
332
334 # Create a new branch in source repo
333 # Create a new branch in source repo
335 new_branch_commit = source_repo.commit_ids[-3]
334 new_branch_commit = source_repo.commit_ids[-3]
336 source_repo._checkout(new_branch_commit)
335 source_repo._checkout(new_branch_commit)
337 source_repo._checkout('new_branch', create=True)
336 source_repo._checkout('new_branch', create=True)
338
337
339 clone_path = next(REPO_PATH_GENERATOR)
338 clone_path = next(REPO_PATH_GENERATOR)
340 source_repo._local_clone(clone_path, 'new_branch')
339 source_repo._local_clone(clone_path, 'new_branch')
341 repo_clone = GitRepository(clone_path)
340 repo_clone = GitRepository(clone_path)
342
341
343 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
344
343
345 clone_path = next(REPO_PATH_GENERATOR)
344 clone_path = next(REPO_PATH_GENERATOR)
346 source_repo._local_clone(clone_path, 'master')
345 source_repo._local_clone(clone_path, 'master')
347 repo_clone = GitRepository(clone_path)
346 repo_clone = GitRepository(clone_path)
348
347
349 assert source_repo.commit_ids == repo_clone.commit_ids
348 assert source_repo.commit_ids == repo_clone.commit_ids
350
349
351 def test_local_clone_fails_if_target_exists(self):
350 def test_local_clone_fails_if_target_exists(self):
352 with pytest.raises(RepositoryError):
351 with pytest.raises(RepositoryError):
353 self.repo._local_clone(self.repo.path, 'master')
352 self.repo._local_clone(self.repo.path, 'master')
354
353
355 def test_local_fetch(self):
354 def test_local_fetch(self):
356 target_repo = self.get_empty_repo()
355 target_repo = self.get_empty_repo()
357 source_repo = self.get_clone_repo()
356 source_repo = self.get_clone_repo()
358
357
359 # Create a new branch in source repo
358 # Create a new branch in source repo
360 master_commit = source_repo.commit_ids[-1]
359 master_commit = source_repo.commit_ids[-1]
361 new_branch_commit = source_repo.commit_ids[-3]
360 new_branch_commit = source_repo.commit_ids[-3]
362 source_repo._checkout(new_branch_commit)
361 source_repo._checkout(new_branch_commit)
363 source_repo._checkout('new_branch', create=True)
362 source_repo._checkout('new_branch', create=True)
364
363
365 target_repo._local_fetch(source_repo.path, 'new_branch')
364 target_repo._local_fetch(source_repo.path, 'new_branch')
366 assert target_repo._last_fetch_heads() == [new_branch_commit]
365 assert target_repo._last_fetch_heads() == [new_branch_commit]
367
366
368 target_repo._local_fetch(source_repo.path, 'master')
367 target_repo._local_fetch(source_repo.path, 'master')
369 assert target_repo._last_fetch_heads() == [master_commit]
368 assert target_repo._last_fetch_heads() == [master_commit]
370
369
371 def test_local_fetch_from_bare_repo(self):
370 def test_local_fetch_from_bare_repo(self):
372 target_repo = self.get_empty_repo()
371 target_repo = self.get_empty_repo()
373 target_repo._local_fetch(self.repo.path, 'master')
372 target_repo._local_fetch(self.repo.path, 'master')
374
373
375 master_commit = self.repo.commit_ids[-1]
374 master_commit = self.repo.commit_ids[-1]
376 assert target_repo._last_fetch_heads() == [master_commit]
375 assert target_repo._last_fetch_heads() == [master_commit]
377
376
378 def test_local_fetch_from_same_repo(self):
377 def test_local_fetch_from_same_repo(self):
379 with pytest.raises(ValueError):
378 with pytest.raises(ValueError):
380 self.repo._local_fetch(self.repo.path, 'master')
379 self.repo._local_fetch(self.repo.path, 'master')
381
380
382 def test_local_fetch_branch_does_not_exist(self):
381 def test_local_fetch_branch_does_not_exist(self):
383 target_repo = self.get_empty_repo()
382 target_repo = self.get_empty_repo()
384
383
385 with pytest.raises(RepositoryError):
384 with pytest.raises(RepositoryError):
386 target_repo._local_fetch(self.repo.path, 'new_branch')
385 target_repo._local_fetch(self.repo.path, 'new_branch')
387
386
388 def test_local_pull(self):
387 def test_local_pull(self):
389 target_repo = self.get_empty_repo()
388 target_repo = self.get_empty_repo()
390 source_repo = self.get_clone_repo()
389 source_repo = self.get_clone_repo()
391
390
392 # Create a new branch in source repo
391 # Create a new branch in source repo
393 master_commit = source_repo.commit_ids[-1]
392 master_commit = source_repo.commit_ids[-1]
394 new_branch_commit = source_repo.commit_ids[-3]
393 new_branch_commit = source_repo.commit_ids[-3]
395 source_repo._checkout(new_branch_commit)
394 source_repo._checkout(new_branch_commit)
396 source_repo._checkout('new_branch', create=True)
395 source_repo._checkout('new_branch', create=True)
397
396
398 target_repo._local_pull(source_repo.path, 'new_branch')
397 target_repo._local_pull(source_repo.path, 'new_branch')
399 target_repo = GitRepository(target_repo.path)
398 target_repo = GitRepository(target_repo.path)
400 assert target_repo.head == new_branch_commit
399 assert target_repo.head == new_branch_commit
401
400
402 target_repo._local_pull(source_repo.path, 'master')
401 target_repo._local_pull(source_repo.path, 'master')
403 target_repo = GitRepository(target_repo.path)
402 target_repo = GitRepository(target_repo.path)
404 assert target_repo.head == master_commit
403 assert target_repo.head == master_commit
405
404
406 def test_local_pull_in_bare_repo(self):
405 def test_local_pull_in_bare_repo(self):
407 with pytest.raises(RepositoryError):
406 with pytest.raises(RepositoryError):
408 self.repo._local_pull(self.repo.path, 'master')
407 self.repo._local_pull(self.repo.path, 'master')
409
408
410 def test_local_merge(self):
409 def test_local_merge(self):
411 target_repo = self.get_empty_repo()
410 target_repo = self.get_empty_repo()
412 source_repo = self.get_clone_repo()
411 source_repo = self.get_clone_repo()
413
412
414 # Create a new branch in source repo
413 # Create a new branch in source repo
415 master_commit = source_repo.commit_ids[-1]
414 master_commit = source_repo.commit_ids[-1]
416 new_branch_commit = source_repo.commit_ids[-3]
415 new_branch_commit = source_repo.commit_ids[-3]
417 source_repo._checkout(new_branch_commit)
416 source_repo._checkout(new_branch_commit)
418 source_repo._checkout('new_branch', create=True)
417 source_repo._checkout('new_branch', create=True)
419
418
420 # This is required as one cannot do a -ff-only merge in an empty repo.
419 # This is required as one cannot do a -ff-only merge in an empty repo.
421 target_repo._local_pull(source_repo.path, 'new_branch')
420 target_repo._local_pull(source_repo.path, 'new_branch')
422
421
423 target_repo._local_fetch(source_repo.path, 'master')
422 target_repo._local_fetch(source_repo.path, 'master')
424 merge_message = 'Merge message\n\nDescription:...'
423 merge_message = 'Merge message\n\nDescription:...'
425 user_name = 'Albert Einstein'
424 user_name = 'Albert Einstein'
426 user_email = 'albert@einstein.com'
425 user_email = 'albert@einstein.com'
427 target_repo._local_merge(merge_message, user_name, user_email,
426 target_repo._local_merge(merge_message, user_name, user_email,
428 target_repo._last_fetch_heads())
427 target_repo._last_fetch_heads())
429
428
430 target_repo = GitRepository(target_repo.path)
429 target_repo = GitRepository(target_repo.path)
431 assert target_repo.commit_ids[-2] == master_commit
430 assert target_repo.commit_ids[-2] == master_commit
432 last_commit = target_repo.get_commit(target_repo.head)
431 last_commit = target_repo.get_commit(target_repo.head)
433 assert last_commit.message.strip() == merge_message
432 assert last_commit.message.strip() == merge_message
434 assert last_commit.author == '%s <%s>' % (user_name, user_email)
433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
435
434
436 assert not os.path.exists(
435 assert not os.path.exists(
437 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
438
437
439 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
440 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
441 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
442
441
443 target_repo._local_fetch(self.repo.path, 'master')
442 target_repo._local_fetch(self.repo.path, 'master')
444 with pytest.raises(RepositoryError):
443 with pytest.raises(RepositoryError):
445 target_repo._local_merge(
444 target_repo._local_merge(
446 'merge_message', 'user name', 'user@name.com',
445 'merge_message', 'user name', 'user@name.com',
447 target_repo._last_fetch_heads())
446 target_repo._last_fetch_heads())
448
447
449 # Check we are not left in an intermediate merge state
448 # Check we are not left in an intermediate merge state
450 assert not os.path.exists(
449 assert not os.path.exists(
451 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
452
451
453 def test_local_merge_into_empty_repo(self):
452 def test_local_merge_into_empty_repo(self):
454 target_repo = self.get_empty_repo()
453 target_repo = self.get_empty_repo()
455
454
456 # This is required as one cannot do a -ff-only merge in an empty repo.
455 # This is required as one cannot do a -ff-only merge in an empty repo.
457 target_repo._local_fetch(self.repo.path, 'master')
456 target_repo._local_fetch(self.repo.path, 'master')
458 with pytest.raises(RepositoryError):
457 with pytest.raises(RepositoryError):
459 target_repo._local_merge(
458 target_repo._local_merge(
460 'merge_message', 'user name', 'user@name.com',
459 'merge_message', 'user name', 'user@name.com',
461 target_repo._last_fetch_heads())
460 target_repo._last_fetch_heads())
462
461
463 def test_local_merge_in_bare_repo(self):
462 def test_local_merge_in_bare_repo(self):
464 with pytest.raises(RepositoryError):
463 with pytest.raises(RepositoryError):
465 self.repo._local_merge(
464 self.repo._local_merge(
466 'merge_message', 'user name', 'user@name.com', None)
465 'merge_message', 'user name', 'user@name.com', None)
467
466
468 def test_local_push_non_bare(self):
467 def test_local_push_non_bare(self):
469 target_repo = self.get_empty_repo()
468 target_repo = self.get_empty_repo()
470
469
471 pushed_branch = 'pushed_branch'
470 pushed_branch = 'pushed_branch'
472 self.repo._local_push('master', target_repo.path, pushed_branch)
471 self.repo._local_push('master', target_repo.path, pushed_branch)
473 # Fix the HEAD of the target repo, or otherwise GitRepository won't
472 # Fix the HEAD of the target repo, or otherwise GitRepository won't
474 # report any branches.
473 # report any branches.
475 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
476 f.write('ref: refs/heads/%s' % pushed_branch)
475 f.write('ref: refs/heads/%s' % pushed_branch)
477
476
478 target_repo = GitRepository(target_repo.path)
477 target_repo = GitRepository(target_repo.path)
479
478
480 assert (target_repo.branches[pushed_branch] ==
479 assert (target_repo.branches[pushed_branch] ==
481 self.repo.branches['master'])
480 self.repo.branches['master'])
482
481
483 def test_local_push_bare(self):
482 def test_local_push_bare(self):
484 target_repo = self.get_empty_repo(bare=True)
483 target_repo = self.get_empty_repo(bare=True)
485
484
486 pushed_branch = 'pushed_branch'
485 pushed_branch = 'pushed_branch'
487 self.repo._local_push('master', target_repo.path, pushed_branch)
486 self.repo._local_push('master', target_repo.path, pushed_branch)
488 # Fix the HEAD of the target repo, or otherwise GitRepository won't
487 # Fix the HEAD of the target repo, or otherwise GitRepository won't
489 # report any branches.
488 # report any branches.
490 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
491 f.write('ref: refs/heads/%s' % pushed_branch)
490 f.write('ref: refs/heads/%s' % pushed_branch)
492
491
493 target_repo = GitRepository(target_repo.path)
492 target_repo = GitRepository(target_repo.path)
494
493
495 assert (target_repo.branches[pushed_branch] ==
494 assert (target_repo.branches[pushed_branch] ==
496 self.repo.branches['master'])
495 self.repo.branches['master'])
497
496
498 def test_local_push_non_bare_target_branch_is_checked_out(self):
497 def test_local_push_non_bare_target_branch_is_checked_out(self):
499 target_repo = self.get_clone_repo()
498 target_repo = self.get_clone_repo()
500
499
501 pushed_branch = 'pushed_branch'
500 pushed_branch = 'pushed_branch'
502 # Create a new branch in source repo
501 # Create a new branch in source repo
503 new_branch_commit = target_repo.commit_ids[-3]
502 new_branch_commit = target_repo.commit_ids[-3]
504 target_repo._checkout(new_branch_commit)
503 target_repo._checkout(new_branch_commit)
505 target_repo._checkout(pushed_branch, create=True)
504 target_repo._checkout(pushed_branch, create=True)
506
505
507 self.repo._local_push('master', target_repo.path, pushed_branch)
506 self.repo._local_push('master', target_repo.path, pushed_branch)
508
507
509 target_repo = GitRepository(target_repo.path)
508 target_repo = GitRepository(target_repo.path)
510
509
511 assert (target_repo.branches[pushed_branch] ==
510 assert (target_repo.branches[pushed_branch] ==
512 self.repo.branches['master'])
511 self.repo.branches['master'])
513
512
514 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
515 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
516 with pytest.raises(RepositoryError):
515 with pytest.raises(RepositoryError):
517 self.repo._local_push('master', target_repo.path, 'master')
516 self.repo._local_push('master', target_repo.path, 'master')
518
517
519 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
520 target_repo = self.get_empty_repo(bare=True)
519 target_repo = self.get_empty_repo(bare=True)
521
520
522 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
523 self.repo._local_push(
522 self.repo._local_push(
524 'master', target_repo.path, 'master', enable_hooks=True)
523 'master', target_repo.path, 'master', enable_hooks=True)
525 env = run_mock.call_args[1]['extra_env']
524 env = run_mock.call_args[1]['extra_env']
526 assert 'RC_SKIP_HOOKS' not in env
525 assert 'RC_SKIP_HOOKS' not in env
527
526
528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
529 path_components = (
528 path_components = (
530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
531 hook_path = os.path.join(repo_path, *path_components)
530 hook_path = os.path.join(repo_path, *path_components)
532 with open(hook_path, 'w') as f:
531 with open(hook_path, 'w') as f:
533 script_lines = [
532 script_lines = [
534 '#!%s' % sys.executable,
533 '#!%s' % sys.executable,
535 'import os',
534 'import os',
536 'import sys',
535 'import sys',
537 'if os.environ.get("RC_SKIP_HOOKS"):',
536 'if os.environ.get("RC_SKIP_HOOKS"):',
538 ' sys.exit(0)',
537 ' sys.exit(0)',
539 'sys.exit(1)',
538 'sys.exit(1)',
540 ]
539 ]
541 f.write('\n'.join(script_lines))
540 f.write('\n'.join(script_lines))
542 os.chmod(hook_path, 0755)
541 os.chmod(hook_path, 0755)
543
542
544 def test_local_push_does_not_execute_hook(self):
543 def test_local_push_does_not_execute_hook(self):
545 target_repo = self.get_empty_repo()
544 target_repo = self.get_empty_repo()
546
545
547 pushed_branch = 'pushed_branch'
546 pushed_branch = 'pushed_branch'
548 self._add_failing_hook(target_repo.path, 'pre-receive')
547 self._add_failing_hook(target_repo.path, 'pre-receive')
549 self.repo._local_push('master', target_repo.path, pushed_branch)
548 self.repo._local_push('master', target_repo.path, pushed_branch)
550 # Fix the HEAD of the target repo, or otherwise GitRepository won't
549 # Fix the HEAD of the target repo, or otherwise GitRepository won't
551 # report any branches.
550 # report any branches.
552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
553 f.write('ref: refs/heads/%s' % pushed_branch)
552 f.write('ref: refs/heads/%s' % pushed_branch)
554
553
555 target_repo = GitRepository(target_repo.path)
554 target_repo = GitRepository(target_repo.path)
556
555
557 assert (target_repo.branches[pushed_branch] ==
556 assert (target_repo.branches[pushed_branch] ==
558 self.repo.branches['master'])
557 self.repo.branches['master'])
559
558
560 def test_local_push_executes_hook(self):
559 def test_local_push_executes_hook(self):
561 target_repo = self.get_empty_repo(bare=True)
560 target_repo = self.get_empty_repo(bare=True)
562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
563 with pytest.raises(RepositoryError):
562 with pytest.raises(RepositoryError):
564 self.repo._local_push(
563 self.repo._local_push(
565 'master', target_repo.path, 'master', enable_hooks=True)
564 'master', target_repo.path, 'master', enable_hooks=True)
566
565
567 def test_maybe_prepare_merge_workspace(self):
566 def test_maybe_prepare_merge_workspace(self):
568 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
569 2, 'pr2', Reference('branch', 'master', 'unused'),
568 2, 'pr2', Reference('branch', 'master', 'unused'),
570 Reference('branch', 'master', 'unused'))
569 Reference('branch', 'master', 'unused'))
571
570
572 assert os.path.isdir(workspace)
571 assert os.path.isdir(workspace)
573 workspace_repo = GitRepository(workspace)
572 workspace_repo = GitRepository(workspace)
574 assert workspace_repo.branches == self.repo.branches
573 assert workspace_repo.branches == self.repo.branches
575
574
576 # Calling it a second time should also succeed
575 # Calling it a second time should also succeed
577 workspace = self.repo._maybe_prepare_merge_workspace(
576 workspace = self.repo._maybe_prepare_merge_workspace(
578 2, 'pr2', Reference('branch', 'master', 'unused'),
577 2, 'pr2', Reference('branch', 'master', 'unused'),
579 Reference('branch', 'master', 'unused'))
578 Reference('branch', 'master', 'unused'))
580 assert os.path.isdir(workspace)
579 assert os.path.isdir(workspace)
581
580
582 def test_maybe_prepare_merge_workspace_different_refs(self):
581 def test_maybe_prepare_merge_workspace_different_refs(self):
583 workspace = self.repo._maybe_prepare_merge_workspace(
582 workspace = self.repo._maybe_prepare_merge_workspace(
584 2, 'pr2', Reference('branch', 'master', 'unused'),
583 2, 'pr2', Reference('branch', 'master', 'unused'),
585 Reference('branch', 'develop', 'unused'))
584 Reference('branch', 'develop', 'unused'))
586
585
587 assert os.path.isdir(workspace)
586 assert os.path.isdir(workspace)
588 workspace_repo = GitRepository(workspace)
587 workspace_repo = GitRepository(workspace)
589 assert workspace_repo.branches == self.repo.branches
588 assert workspace_repo.branches == self.repo.branches
590
589
591 # Calling it a second time should also succeed
590 # Calling it a second time should also succeed
592 workspace = self.repo._maybe_prepare_merge_workspace(
591 workspace = self.repo._maybe_prepare_merge_workspace(
593 2, 'pr2', Reference('branch', 'master', 'unused'),
592 2, 'pr2', Reference('branch', 'master', 'unused'),
594 Reference('branch', 'develop', 'unused'))
593 Reference('branch', 'develop', 'unused'))
595 assert os.path.isdir(workspace)
594 assert os.path.isdir(workspace)
596
595
597 def test_cleanup_merge_workspace(self):
596 def test_cleanup_merge_workspace(self):
598 workspace = self.repo._maybe_prepare_merge_workspace(
597 workspace = self.repo._maybe_prepare_merge_workspace(
599 2, 'pr3', Reference('branch', 'master', 'unused'),
598 2, 'pr3', Reference('branch', 'master', 'unused'),
600 Reference('branch', 'master', 'unused'))
599 Reference('branch', 'master', 'unused'))
601 self.repo.cleanup_merge_workspace(2, 'pr3')
600 self.repo.cleanup_merge_workspace(2, 'pr3')
602
601
603 assert not os.path.exists(workspace)
602 assert not os.path.exists(workspace)
604
603
605 def test_cleanup_merge_workspace_invalid_workspace_id(self):
604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
606 # No assert: because in case of an inexistent workspace this function
605 # No assert: because in case of an inexistent workspace this function
607 # should still succeed.
606 # should still succeed.
608 self.repo.cleanup_merge_workspace(1, 'pr4')
607 self.repo.cleanup_merge_workspace(1, 'pr4')
609
608
610 def test_set_refs(self):
609 def test_set_refs(self):
611 test_ref = 'refs/test-refs/abcde'
610 test_ref = 'refs/test-refs/abcde'
612 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
613
612
614 self.repo.set_refs(test_ref, test_commit_id)
613 self.repo.set_refs(test_ref, test_commit_id)
615 stdout, _ = self.repo.run_git_command(['show-ref'])
614 stdout, _ = self.repo.run_git_command(['show-ref'])
616 assert test_ref in stdout
615 assert test_ref in stdout
617 assert test_commit_id in stdout
616 assert test_commit_id in stdout
618
617
619 def test_remove_ref(self):
618 def test_remove_ref(self):
620 test_ref = 'refs/test-refs/abcde'
619 test_ref = 'refs/test-refs/abcde'
621 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
622 self.repo.set_refs(test_ref, test_commit_id)
621 self.repo.set_refs(test_ref, test_commit_id)
623 stdout, _ = self.repo.run_git_command(['show-ref'])
622 stdout, _ = self.repo.run_git_command(['show-ref'])
624 assert test_ref in stdout
623 assert test_ref in stdout
625 assert test_commit_id in stdout
624 assert test_commit_id in stdout
626
625
627 self.repo.remove_ref(test_ref)
626 self.repo.remove_ref(test_ref)
628 stdout, _ = self.repo.run_git_command(['show-ref'])
627 stdout, _ = self.repo.run_git_command(['show-ref'])
629 assert test_ref not in stdout
628 assert test_ref not in stdout
630 assert test_commit_id not in stdout
629 assert test_commit_id not in stdout
631
630
632
631
633 class TestGitCommit(object):
632 class TestGitCommit(object):
634
633
635 @pytest.fixture(autouse=True)
634 @pytest.fixture(autouse=True)
636 def prepare(self):
635 def prepare(self):
637 self.repo = GitRepository(TEST_GIT_REPO)
636 self.repo = GitRepository(TEST_GIT_REPO)
638
637
639 def test_default_commit(self):
638 def test_default_commit(self):
640 tip = self.repo.get_commit()
639 tip = self.repo.get_commit()
641 assert tip == self.repo.get_commit(None)
640 assert tip == self.repo.get_commit(None)
642 assert tip == self.repo.get_commit('tip')
641 assert tip == self.repo.get_commit('tip')
643
642
644 def test_root_node(self):
643 def test_root_node(self):
645 tip = self.repo.get_commit()
644 tip = self.repo.get_commit()
646 assert tip.root is tip.get_node('')
645 assert tip.root is tip.get_node('')
647
646
648 def test_lazy_fetch(self):
647 def test_lazy_fetch(self):
649 """
648 """
650 Test if commit's nodes expands and are cached as we walk through
649 Test if commit's nodes expands and are cached as we walk through
651 the commit. This test is somewhat hard to write as order of tests
650 the commit. This test is somewhat hard to write as order of tests
652 is a key here. Written by running command after command in a shell.
651 is a key here. Written by running command after command in a shell.
653 """
652 """
654 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
655 assert commit_id in self.repo.commit_ids
654 assert commit_id in self.repo.commit_ids
656 commit = self.repo.get_commit(commit_id)
655 commit = self.repo.get_commit(commit_id)
657 assert len(commit.nodes) == 0
656 assert len(commit.nodes) == 0
658 root = commit.root
657 root = commit.root
659 assert len(commit.nodes) == 1
658 assert len(commit.nodes) == 1
660 assert len(root.nodes) == 8
659 assert len(root.nodes) == 8
661 # accessing root.nodes updates commit.nodes
660 # accessing root.nodes updates commit.nodes
662 assert len(commit.nodes) == 9
661 assert len(commit.nodes) == 9
663
662
664 docs = root.get_node('docs')
663 docs = root.get_node('docs')
665 # we haven't yet accessed anything new as docs dir was already cached
664 # we haven't yet accessed anything new as docs dir was already cached
666 assert len(commit.nodes) == 9
665 assert len(commit.nodes) == 9
667 assert len(docs.nodes) == 8
666 assert len(docs.nodes) == 8
668 # accessing docs.nodes updates commit.nodes
667 # accessing docs.nodes updates commit.nodes
669 assert len(commit.nodes) == 17
668 assert len(commit.nodes) == 17
670
669
671 assert docs is commit.get_node('docs')
670 assert docs is commit.get_node('docs')
672 assert docs is root.nodes[0]
671 assert docs is root.nodes[0]
673 assert docs is root.dirs[0]
672 assert docs is root.dirs[0]
674 assert docs is commit.get_node('docs')
673 assert docs is commit.get_node('docs')
675
674
676 def test_nodes_with_commit(self):
675 def test_nodes_with_commit(self):
677 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
678 commit = self.repo.get_commit(commit_id)
677 commit = self.repo.get_commit(commit_id)
679 root = commit.root
678 root = commit.root
680 docs = root.get_node('docs')
679 docs = root.get_node('docs')
681 assert docs is commit.get_node('docs')
680 assert docs is commit.get_node('docs')
682 api = docs.get_node('api')
681 api = docs.get_node('api')
683 assert api is commit.get_node('docs/api')
682 assert api is commit.get_node('docs/api')
684 index = api.get_node('index.rst')
683 index = api.get_node('index.rst')
685 assert index is commit.get_node('docs/api/index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
686 assert index is commit.get_node('docs')\
685 assert index is commit.get_node('docs')\
687 .get_node('api')\
686 .get_node('api')\
688 .get_node('index.rst')
687 .get_node('index.rst')
689
688
690 def test_branch_and_tags(self):
689 def test_branch_and_tags(self):
691 """
690 """
692 rev0 = self.repo.commit_ids[0]
691 rev0 = self.repo.commit_ids[0]
693 commit0 = self.repo.get_commit(rev0)
692 commit0 = self.repo.get_commit(rev0)
694 assert commit0.branch == 'master'
693 assert commit0.branch == 'master'
695 assert commit0.tags == []
694 assert commit0.tags == []
696
695
697 rev10 = self.repo.commit_ids[10]
696 rev10 = self.repo.commit_ids[10]
698 commit10 = self.repo.get_commit(rev10)
697 commit10 = self.repo.get_commit(rev10)
699 assert commit10.branch == 'master'
698 assert commit10.branch == 'master'
700 assert commit10.tags == []
699 assert commit10.tags == []
701
700
702 rev44 = self.repo.commit_ids[44]
701 rev44 = self.repo.commit_ids[44]
703 commit44 = self.repo.get_commit(rev44)
702 commit44 = self.repo.get_commit(rev44)
704 assert commit44.branch == 'web-branch'
703 assert commit44.branch == 'web-branch'
705
704
706 tip = self.repo.get_commit('tip')
705 tip = self.repo.get_commit('tip')
707 assert 'tip' in tip.tags
706 assert 'tip' in tip.tags
708 """
707 """
709 # Those tests would fail - branches are now going
708 # Those tests would fail - branches are now going
710 # to be changed at main API in order to support git backend
709 # to be changed at main API in order to support git backend
711 pass
710 pass
712
711
713 def test_file_size(self):
712 def test_file_size(self):
714 to_check = (
713 to_check = (
715 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
716 'vcs/backends/BaseRepository.py', 502),
715 'vcs/backends/BaseRepository.py', 502),
717 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
718 'vcs/backends/hg.py', 854),
717 'vcs/backends/hg.py', 854),
719 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
720 'setup.py', 1068),
719 'setup.py', 1068),
721
720
722 ('d955cd312c17b02143c04fa1099a352b04368118',
721 ('d955cd312c17b02143c04fa1099a352b04368118',
723 'vcs/backends/base.py', 2921),
722 'vcs/backends/base.py', 2921),
724 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
725 'vcs/backends/base.py', 3936),
724 'vcs/backends/base.py', 3936),
726 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
727 'vcs/backends/base.py', 6189),
726 'vcs/backends/base.py', 6189),
728 )
727 )
729 for commit_id, path, size in to_check:
728 for commit_id, path, size in to_check:
730 node = self.repo.get_commit(commit_id).get_node(path)
729 node = self.repo.get_commit(commit_id).get_node(path)
731 assert node.is_file()
730 assert node.is_file()
732 assert node.size == size
731 assert node.size == size
733
732
734 def test_file_history_from_commits(self):
733 def test_file_history_from_commits(self):
735 node = self.repo[10].get_node('setup.py')
734 node = self.repo[10].get_node('setup.py')
736 commit_ids = [commit.raw_id for commit in node.history]
735 commit_ids = [commit.raw_id for commit in node.history]
737 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
738
737
739 node = self.repo[20].get_node('setup.py')
738 node = self.repo[20].get_node('setup.py')
740 node_ids = [commit.raw_id for commit in node.history]
739 node_ids = [commit.raw_id for commit in node.history]
741 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
742 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
743
742
744 # special case we check history from commit that has this particular
743 # special case we check history from commit that has this particular
745 # file changed this means we check if it's included as well
744 # file changed this means we check if it's included as well
746 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
747 .get_node('setup.py')
746 .get_node('setup.py')
748 node_ids = [commit.raw_id for commit in node.history]
747 node_ids = [commit.raw_id for commit in node.history]
749 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
750 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
751
750
752 def test_file_history(self):
751 def test_file_history(self):
753 # we can only check if those commits are present in the history
752 # we can only check if those commits are present in the history
754 # as we cannot update this test every time file is changed
753 # as we cannot update this test every time file is changed
755 files = {
754 files = {
756 'setup.py': [
755 'setup.py': [
757 '54386793436c938cff89326944d4c2702340037d',
756 '54386793436c938cff89326944d4c2702340037d',
758 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
759 '998ed409c795fec2012b1c0ca054d99888b22090',
758 '998ed409c795fec2012b1c0ca054d99888b22090',
760 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
761 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
762 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
763 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
764 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
765 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
766 ],
765 ],
767 'vcs/nodes.py': [
766 'vcs/nodes.py': [
768 '33fa3223355104431402a888fa77a4e9956feb3e',
767 '33fa3223355104431402a888fa77a4e9956feb3e',
769 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
770 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
771 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
772 'c877b68d18e792a66b7f4c529ea02c8f80801542',
771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
773 '4313566d2e417cb382948f8d9d7c765330356054',
772 '4313566d2e417cb382948f8d9d7c765330356054',
774 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
775 '54386793436c938cff89326944d4c2702340037d',
774 '54386793436c938cff89326944d4c2702340037d',
776 '54000345d2e78b03a99d561399e8e548de3f3203',
775 '54000345d2e78b03a99d561399e8e548de3f3203',
777 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
778 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
779 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
780 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
781 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
782 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
783 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
784 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
785 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
786 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
787 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
788 'f15c21f97864b4f071cddfbf2750ec2e23859414',
787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
789 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
790 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
791 '84dec09632a4458f79f50ddbbd155506c460b4f9',
790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
792 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
793 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
794 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
795 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
796 '6970b057cffe4aab0a792aa634c89f4bebf01441',
795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
797 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
798 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
799 ],
798 ],
800 'vcs/backends/git.py': [
799 'vcs/backends/git.py': [
801 '4cf116ad5a457530381135e2f4c453e68a1b0105',
800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
802 '9a751d84d8e9408e736329767387f41b36935153',
801 '9a751d84d8e9408e736329767387f41b36935153',
803 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
804 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
805 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
806 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
807 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
808 '54000345d2e78b03a99d561399e8e548de3f3203',
807 '54000345d2e78b03a99d561399e8e548de3f3203',
809 ],
808 ],
810 }
809 }
811 for path, commit_ids in files.items():
810 for path, commit_ids in files.items():
812 node = self.repo.get_commit(commit_ids[0]).get_node(path)
811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
813 node_ids = [commit.raw_id for commit in node.history]
812 node_ids = [commit.raw_id for commit in node.history]
814 assert set(commit_ids).issubset(set(node_ids)), (
813 assert set(commit_ids).issubset(set(node_ids)), (
815 "We assumed that %s is subset of commit_ids for which file %s "
814 "We assumed that %s is subset of commit_ids for which file %s "
816 "has been changed, and history of that node returned: %s"
815 "has been changed, and history of that node returned: %s"
817 % (commit_ids, path, node_ids))
816 % (commit_ids, path, node_ids))
818
817
819 def test_file_annotate(self):
818 def test_file_annotate(self):
820 files = {
819 files = {
821 'vcs/backends/__init__.py': {
820 'vcs/backends/__init__.py': {
822 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
823 'lines_no': 1,
822 'lines_no': 1,
824 'commits': [
823 'commits': [
825 'c1214f7e79e02fc37156ff215cd71275450cffc3',
824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
826 ],
825 ],
827 },
826 },
828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
829 'lines_no': 21,
828 'lines_no': 21,
830 'commits': [
829 'commits': [
831 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
852 ],
851 ],
853 },
852 },
854 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
855 'lines_no': 32,
854 'lines_no': 32,
856 'commits': [
855 'commits': [
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '54000345d2e78b03a99d561399e8e548de3f3203',
863 '54000345d2e78b03a99d561399e8e548de3f3203',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
867 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
873 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
889 ],
888 ],
890 },
889 },
891 },
890 },
892 }
891 }
893
892
894 for fname, commit_dict in files.items():
893 for fname, commit_dict in files.items():
895 for commit_id, __ in commit_dict.items():
894 for commit_id, __ in commit_dict.items():
896 commit = self.repo.get_commit(commit_id)
895 commit = self.repo.get_commit(commit_id)
897
896
898 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
899 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
900 assert l1_1 == l1_2
899 assert l1_1 == l1_2
901 l1 = l1_1
900 l1 = l1_1
902 l2 = files[fname][commit_id]['commits']
901 l2 = files[fname][commit_id]['commits']
903 assert l1 == l2, (
902 assert l1 == l2, (
904 "The lists of commit_ids for %s@commit_id %s"
903 "The lists of commit_ids for %s@commit_id %s"
905 "from annotation list should match each other, "
904 "from annotation list should match each other, "
906 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
907
906
908 def test_files_state(self):
907 def test_files_state(self):
909 """
908 """
910 Tests state of FileNodes.
909 Tests state of FileNodes.
911 """
910 """
912 node = self.repo\
911 node = self.repo\
913 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
914 .get_node('vcs/utils/diffs.py')
913 .get_node('vcs/utils/diffs.py')
915 assert node.state, NodeState.ADDED
914 assert node.state, NodeState.ADDED
916 assert node.added
915 assert node.added
917 assert not node.changed
916 assert not node.changed
918 assert not node.not_changed
917 assert not node.not_changed
919 assert not node.removed
918 assert not node.removed
920
919
921 node = self.repo\
920 node = self.repo\
922 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
923 .get_node('.hgignore')
922 .get_node('.hgignore')
924 assert node.state, NodeState.CHANGED
923 assert node.state, NodeState.CHANGED
925 assert not node.added
924 assert not node.added
926 assert node.changed
925 assert node.changed
927 assert not node.not_changed
926 assert not node.not_changed
928 assert not node.removed
927 assert not node.removed
929
928
930 node = self.repo\
929 node = self.repo\
931 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
932 .get_node('setup.py')
931 .get_node('setup.py')
933 assert node.state, NodeState.NOT_CHANGED
932 assert node.state, NodeState.NOT_CHANGED
934 assert not node.added
933 assert not node.added
935 assert not node.changed
934 assert not node.changed
936 assert node.not_changed
935 assert node.not_changed
937 assert not node.removed
936 assert not node.removed
938
937
939 # If node has REMOVED state then trying to fetch it would raise
938 # If node has REMOVED state then trying to fetch it would raise
940 # CommitError exception
939 # CommitError exception
941 commit = self.repo.get_commit(
940 commit = self.repo.get_commit(
942 'fa6600f6848800641328adbf7811fd2372c02ab2')
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
943 path = 'vcs/backends/BaseRepository.py'
942 path = 'vcs/backends/BaseRepository.py'
944 with pytest.raises(NodeDoesNotExistError):
943 with pytest.raises(NodeDoesNotExistError):
945 commit.get_node(path)
944 commit.get_node(path)
946 # but it would be one of ``removed`` (commit's attribute)
945 # but it would be one of ``removed`` (commit's attribute)
947 assert path in [rf.path for rf in commit.removed]
946 assert path in [rf.path for rf in commit.removed]
948
947
949 commit = self.repo.get_commit(
948 commit = self.repo.get_commit(
950 '54386793436c938cff89326944d4c2702340037d')
949 '54386793436c938cff89326944d4c2702340037d')
951 changed = [
950 changed = [
952 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
953 'vcs/nodes.py']
952 'vcs/nodes.py']
954 assert set(changed) == set([f.path for f in commit.changed])
953 assert set(changed) == set([f.path for f in commit.changed])
955
954
956 def test_unicode_branch_refs(self):
955 def test_unicode_branch_refs(self):
957 unicode_branches = {
956 unicode_branches = {
958 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
959 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
960 }
959 }
961 with mock.patch(
960 with mock.patch(
962 ("rhodecode.lib.vcs.backends.git.repository"
961 ("rhodecode.lib.vcs.backends.git.repository"
963 ".GitRepository._refs"),
962 ".GitRepository._refs"),
964 unicode_branches):
963 unicode_branches):
965 branches = self.repo.branches
964 branches = self.repo.branches
966
965
967 assert 'unicode' in branches
966 assert 'unicode' in branches
968 assert u'uniΓ§ΓΆβˆ‚e' in branches
967 assert u'uniΓ§ΓΆβˆ‚e' in branches
969
968
970 def test_unicode_tag_refs(self):
969 def test_unicode_tag_refs(self):
971 unicode_tags = {
970 unicode_tags = {
972 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
974 }
973 }
975 with mock.patch(
974 with mock.patch(
976 ("rhodecode.lib.vcs.backends.git.repository"
975 ("rhodecode.lib.vcs.backends.git.repository"
977 ".GitRepository._refs"),
976 ".GitRepository._refs"),
978 unicode_tags):
977 unicode_tags):
979 tags = self.repo.tags
978 tags = self.repo.tags
980
979
981 assert 'unicode' in tags
980 assert 'unicode' in tags
982 assert u'uniΓ§ΓΆβˆ‚e' in tags
981 assert u'uniΓ§ΓΆβˆ‚e' in tags
983
982
984 def test_commit_message_is_unicode(self):
983 def test_commit_message_is_unicode(self):
985 for commit in self.repo:
984 for commit in self.repo:
986 assert type(commit.message) == unicode
985 assert type(commit.message) == unicode
987
986
988 def test_commit_author_is_unicode(self):
987 def test_commit_author_is_unicode(self):
989 for commit in self.repo:
988 for commit in self.repo:
990 assert type(commit.author) == unicode
989 assert type(commit.author) == unicode
991
990
992 def test_repo_files_content_is_unicode(self):
991 def test_repo_files_content_is_unicode(self):
993 commit = self.repo.get_commit()
992 commit = self.repo.get_commit()
994 for node in commit.get_node('/'):
993 for node in commit.get_node('/'):
995 if node.is_file():
994 if node.is_file():
996 assert type(node.content) == unicode
995 assert type(node.content) == unicode
997
996
998 def test_wrong_path(self):
997 def test_wrong_path(self):
999 # There is 'setup.py' in the root dir but not there:
998 # There is 'setup.py' in the root dir but not there:
1000 path = 'foo/bar/setup.py'
999 path = 'foo/bar/setup.py'
1001 tip = self.repo.get_commit()
1000 tip = self.repo.get_commit()
1002 with pytest.raises(VCSError):
1001 with pytest.raises(VCSError):
1003 tip.get_node(path)
1002 tip.get_node(path)
1004
1003
1005 @pytest.mark.parametrize("author_email, commit_id", [
1004 @pytest.mark.parametrize("author_email, commit_id", [
1006 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1007 ('lukasz.balcerzak@python-center.pl',
1006 ('lukasz.balcerzak@python-center.pl',
1008 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1009 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1010 ])
1009 ])
1011 def test_author_email(self, author_email, commit_id):
1010 def test_author_email(self, author_email, commit_id):
1012 commit = self.repo.get_commit(commit_id)
1011 commit = self.repo.get_commit(commit_id)
1013 assert author_email == commit.author_email
1012 assert author_email == commit.author_email
1014
1013
1015 @pytest.mark.parametrize("author, commit_id", [
1014 @pytest.mark.parametrize("author, commit_id", [
1016 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1017 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1018 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1019 ])
1018 ])
1020 def test_author_username(self, author, commit_id):
1019 def test_author_username(self, author, commit_id):
1021 commit = self.repo.get_commit(commit_id)
1020 commit = self.repo.get_commit(commit_id)
1022 assert author == commit.author_name
1021 assert author == commit.author_name
1023
1022
1024
1023
1025 class TestLargeFileRepo(object):
1024 class TestLargeFileRepo(object):
1026
1025
1027 def test_large_file(self, backend_git):
1026 def test_large_file(self, backend_git):
1028 conf = make_db_config()
1027 conf = make_db_config()
1029 repo = backend_git.create_test_repo('largefiles', conf)
1028 repo = backend_git.create_test_repo('largefiles', conf)
1030
1029
1031 tip = repo.scm_instance().get_commit()
1030 tip = repo.scm_instance().get_commit()
1032
1031
1033 # extract stored LF node into the origin cache
1032 # extract stored LF node into the origin cache
1034 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1035
1034
1036 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1037 oid_path = os.path.join(lfs_store, oid)
1036 oid_path = os.path.join(lfs_store, oid)
1038 oid_destination = os.path.join(
1037 oid_destination = os.path.join(
1039 conf.get('vcs_git_lfs', 'store_location'), oid)
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1040 shutil.copy(oid_path, oid_destination)
1039 shutil.copy(oid_path, oid_destination)
1041
1040
1042 node = tip.get_node('1MB.zip')
1041 node = tip.get_node('1MB.zip')
1043
1042
1044 lf_node = node.get_largefile_node()
1043 lf_node = node.get_largefile_node()
1045
1044
1046 assert lf_node.is_largefile() is True
1045 assert lf_node.is_largefile() is True
1047 assert lf_node.size == 1024000
1046 assert lf_node.size == 1024000
1048 assert lf_node.name == '1MB.zip'
1047 assert lf_node.name == '1MB.zip'
1049
1048
1050
1049
1051 @pytest.mark.usefixtures("vcs_repository_support")
1050 @pytest.mark.usefixtures("vcs_repository_support")
1052 class TestGitSpecificWithRepo(BackendTestMixin):
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1053
1052
1054 @classmethod
1053 @classmethod
1055 def _get_commits(cls):
1054 def _get_commits(cls):
1056 return [
1055 return [
1057 {
1056 {
1058 'message': 'Initial',
1057 'message': 'Initial',
1059 'author': 'Joe Doe <joe.doe@example.com>',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1060 'date': datetime.datetime(2010, 1, 1, 20),
1059 'date': datetime.datetime(2010, 1, 1, 20),
1061 'added': [
1060 'added': [
1062 FileNode('foobar/static/js/admin/base.js', content='base'),
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1063 FileNode(
1062 FileNode(
1064 'foobar/static/admin', content='admin',
1063 'foobar/static/admin', content='admin',
1065 mode=0120000), # this is a link
1064 mode=0120000), # this is a link
1066 FileNode('foo', content='foo'),
1065 FileNode('foo', content='foo'),
1067 ],
1066 ],
1068 },
1067 },
1069 {
1068 {
1070 'message': 'Second',
1069 'message': 'Second',
1071 'author': 'Joe Doe <joe.doe@example.com>',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1072 'date': datetime.datetime(2010, 1, 1, 22),
1071 'date': datetime.datetime(2010, 1, 1, 22),
1073 'added': [
1072 'added': [
1074 FileNode('foo2', content='foo2'),
1073 FileNode('foo2', content='foo2'),
1075 ],
1074 ],
1076 },
1075 },
1077 ]
1076 ]
1078
1077
1079 def test_paths_slow_traversing(self):
1078 def test_paths_slow_traversing(self):
1080 commit = self.repo.get_commit()
1079 commit = self.repo.get_commit()
1081 assert commit.get_node('foobar').get_node('static').get_node('js')\
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1082 .get_node('admin').get_node('base.js').content == 'base'
1081 .get_node('admin').get_node('base.js').content == 'base'
1083
1082
1084 def test_paths_fast_traversing(self):
1083 def test_paths_fast_traversing(self):
1085 commit = self.repo.get_commit()
1084 commit = self.repo.get_commit()
1086 assert (
1085 assert (
1087 commit.get_node('foobar/static/js/admin/base.js').content ==
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1088 'base')
1087 'base')
1089
1088
1090 def test_get_diff_runs_git_command_with_hashes(self):
1089 def test_get_diff_runs_git_command_with_hashes(self):
1091 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1092 self.repo.get_diff(self.repo[0], self.repo[1])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1093 self.repo.run_git_command.assert_called_once_with(
1092 self.repo.run_git_command.assert_called_once_with(
1094 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1095 '--abbrev=40', self.repo._get_commit_id(0),
1094 '--abbrev=40', self.repo._get_commit_id(0),
1096 self.repo._get_commit_id(1)])
1095 self.repo._get_commit_id(1)])
1097
1096
1098 def test_get_diff_runs_git_command_with_str_hashes(self):
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1099 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1100 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1101 self.repo.run_git_command.assert_called_once_with(
1100 self.repo.run_git_command.assert_called_once_with(
1102 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1103 '--abbrev=40', self.repo._get_commit_id(1)])
1102 '--abbrev=40', self.repo._get_commit_id(1)])
1104
1103
1105 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1106 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1107 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1108 self.repo.run_git_command.assert_called_once_with(
1107 self.repo.run_git_command.assert_called_once_with(
1109 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1110 '--abbrev=40', self.repo._get_commit_id(0),
1109 '--abbrev=40', self.repo._get_commit_id(0),
1111 self.repo._get_commit_id(1), '--', 'foo'])
1110 self.repo._get_commit_id(1), '--', 'foo'])
1112
1111
1113
1112
1114 @pytest.mark.usefixtures("vcs_repository_support")
1113 @pytest.mark.usefixtures("vcs_repository_support")
1115 class TestGitRegression(BackendTestMixin):
1114 class TestGitRegression(BackendTestMixin):
1116
1115
1117 @classmethod
1116 @classmethod
1118 def _get_commits(cls):
1117 def _get_commits(cls):
1119 return [
1118 return [
1120 {
1119 {
1121 'message': 'Initial',
1120 'message': 'Initial',
1122 'author': 'Joe Doe <joe.doe@example.com>',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1123 'date': datetime.datetime(2010, 1, 1, 20),
1122 'date': datetime.datetime(2010, 1, 1, 20),
1124 'added': [
1123 'added': [
1125 FileNode('bot/__init__.py', content='base'),
1124 FileNode('bot/__init__.py', content='base'),
1126 FileNode('bot/templates/404.html', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1127 FileNode('bot/templates/500.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1128 ],
1127 ],
1129 },
1128 },
1130 {
1129 {
1131 'message': 'Second',
1130 'message': 'Second',
1132 'author': 'Joe Doe <joe.doe@example.com>',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1133 'date': datetime.datetime(2010, 1, 1, 22),
1132 'date': datetime.datetime(2010, 1, 1, 22),
1134 'added': [
1133 'added': [
1135 FileNode('bot/build/migrations/1.py', content='foo2'),
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1136 FileNode('bot/build/migrations/2.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1137 FileNode(
1136 FileNode(
1138 'bot/build/static/templates/f.html', content='foo2'),
1137 'bot/build/static/templates/f.html', content='foo2'),
1139 FileNode(
1138 FileNode(
1140 'bot/build/static/templates/f1.html', content='foo2'),
1139 'bot/build/static/templates/f1.html', content='foo2'),
1141 FileNode('bot/build/templates/err.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1142 FileNode('bot/build/templates/err2.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1143 ],
1142 ],
1144 },
1143 },
1145 ]
1144 ]
1146
1145
1147 @pytest.mark.parametrize("path, expected_paths", [
1146 @pytest.mark.parametrize("path, expected_paths", [
1148 ('bot', [
1147 ('bot', [
1149 'bot/build',
1148 'bot/build',
1150 'bot/templates',
1149 'bot/templates',
1151 'bot/__init__.py']),
1150 'bot/__init__.py']),
1152 ('bot/build', [
1151 ('bot/build', [
1153 'bot/build/migrations',
1152 'bot/build/migrations',
1154 'bot/build/static',
1153 'bot/build/static',
1155 'bot/build/templates']),
1154 'bot/build/templates']),
1156 ('bot/build/static', [
1155 ('bot/build/static', [
1157 'bot/build/static/templates']),
1156 'bot/build/static/templates']),
1158 ('bot/build/static/templates', [
1157 ('bot/build/static/templates', [
1159 'bot/build/static/templates/f.html',
1158 'bot/build/static/templates/f.html',
1160 'bot/build/static/templates/f1.html']),
1159 'bot/build/static/templates/f1.html']),
1161 ('bot/build/templates', [
1160 ('bot/build/templates', [
1162 'bot/build/templates/err.html',
1161 'bot/build/templates/err.html',
1163 'bot/build/templates/err2.html']),
1162 'bot/build/templates/err2.html']),
1164 ('bot/templates/', [
1163 ('bot/templates/', [
1165 'bot/templates/404.html',
1164 'bot/templates/404.html',
1166 'bot/templates/500.html']),
1165 'bot/templates/500.html']),
1167 ])
1166 ])
1168 def test_similar_paths(self, path, expected_paths):
1167 def test_similar_paths(self, path, expected_paths):
1169 commit = self.repo.get_commit()
1168 commit = self.repo.get_commit()
1170 paths = [n.path for n in commit.get_nodes(path)]
1169 paths = [n.path for n in commit.get_nodes(path)]
1171 assert paths == expected_paths
1170 assert paths == expected_paths
1172
1171
1173
1172
1174 class TestDiscoverGitVersion:
1173 class TestDiscoverGitVersion(object):
1175
1174
1176 def test_returns_git_version(self, baseapp):
1175 def test_returns_git_version(self, baseapp):
1177 version = discover_git_version()
1176 version = discover_git_version()
1178 assert version
1177 assert version
1179
1178
1180 def test_returns_empty_string_without_vcsserver(self):
1179 def test_returns_empty_string_without_vcsserver(self):
1181 mock_connection = mock.Mock()
1180 mock_connection = mock.Mock()
1182 mock_connection.discover_git_version = mock.Mock(
1181 mock_connection.discover_git_version = mock.Mock(
1183 side_effect=Exception)
1182 side_effect=Exception)
1184 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1185 version = discover_git_version()
1184 version = discover_git_version()
1186 assert version == ''
1185 assert version == ''
1187
1186
1188
1187
1189 class TestGetSubmoduleUrl(object):
1188 class TestGetSubmoduleUrl(object):
1190 def test_submodules_file_found(self):
1189 def test_submodules_file_found(self):
1191 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1192 node = mock.Mock()
1191 node = mock.Mock()
1193 with mock.patch.object(
1192 with mock.patch.object(
1194 commit, 'get_node', return_value=node) as get_node_mock:
1193 commit, 'get_node', return_value=node) as get_node_mock:
1195 node.content = (
1194 node.content = (
1196 '[submodule "subrepo1"]\n'
1195 '[submodule "subrepo1"]\n'
1197 '\tpath = subrepo1\n'
1196 '\tpath = subrepo1\n'
1198 '\turl = https://code.rhodecode.com/dulwich\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1199 )
1198 )
1200 result = commit._get_submodule_url('subrepo1')
1199 result = commit._get_submodule_url('subrepo1')
1201 get_node_mock.assert_called_once_with('.gitmodules')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1202 assert result == 'https://code.rhodecode.com/dulwich'
1201 assert result == 'https://code.rhodecode.com/dulwich'
1203
1202
1204 def test_complex_submodule_path(self):
1203 def test_complex_submodule_path(self):
1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 node = mock.Mock()
1205 node = mock.Mock()
1207 with mock.patch.object(
1206 with mock.patch.object(
1208 commit, 'get_node', return_value=node) as get_node_mock:
1207 commit, 'get_node', return_value=node) as get_node_mock:
1209 node.content = (
1208 node.content = (
1210 '[submodule "complex/subrepo/path"]\n'
1209 '[submodule "complex/subrepo/path"]\n'
1211 '\tpath = complex/subrepo/path\n'
1210 '\tpath = complex/subrepo/path\n'
1212 '\turl = https://code.rhodecode.com/dulwich\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1213 )
1212 )
1214 result = commit._get_submodule_url('complex/subrepo/path')
1213 result = commit._get_submodule_url('complex/subrepo/path')
1215 get_node_mock.assert_called_once_with('.gitmodules')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1216 assert result == 'https://code.rhodecode.com/dulwich'
1215 assert result == 'https://code.rhodecode.com/dulwich'
1217
1216
1218 def test_submodules_file_not_found(self):
1217 def test_submodules_file_not_found(self):
1219 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1220 with mock.patch.object(
1219 with mock.patch.object(
1221 commit, 'get_node', side_effect=NodeDoesNotExistError):
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1222 result = commit._get_submodule_url('complex/subrepo/path')
1221 result = commit._get_submodule_url('complex/subrepo/path')
1223 assert result is None
1222 assert result is None
1224
1223
1225 def test_path_not_found(self):
1224 def test_path_not_found(self):
1226 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1227 node = mock.Mock()
1226 node = mock.Mock()
1228 with mock.patch.object(
1227 with mock.patch.object(
1229 commit, 'get_node', return_value=node) as get_node_mock:
1228 commit, 'get_node', return_value=node) as get_node_mock:
1230 node.content = (
1229 node.content = (
1231 '[submodule "subrepo1"]\n'
1230 '[submodule "subrepo1"]\n'
1232 '\tpath = subrepo1\n'
1231 '\tpath = subrepo1\n'
1233 '\turl = https://code.rhodecode.com/dulwich\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1234 )
1233 )
1235 result = commit._get_submodule_url('subrepo2')
1234 result = commit._get_submodule_url('subrepo2')
1236 get_node_mock.assert_called_once_with('.gitmodules')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1237 assert result is None
1236 assert result is None
1238
1237
1239 def test_returns_cached_values(self):
1238 def test_returns_cached_values(self):
1240 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1241 node = mock.Mock()
1240 node = mock.Mock()
1242 with mock.patch.object(
1241 with mock.patch.object(
1243 commit, 'get_node', return_value=node) as get_node_mock:
1242 commit, 'get_node', return_value=node) as get_node_mock:
1244 node.content = (
1243 node.content = (
1245 '[submodule "subrepo1"]\n'
1244 '[submodule "subrepo1"]\n'
1246 '\tpath = subrepo1\n'
1245 '\tpath = subrepo1\n'
1247 '\turl = https://code.rhodecode.com/dulwich\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1248 )
1247 )
1249 for _ in range(3):
1248 for _ in range(3):
1250 commit._get_submodule_url('subrepo1')
1249 commit._get_submodule_url('subrepo1')
1251 get_node_mock.assert_called_once_with('.gitmodules')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1252
1251
1253 def test_get_node_returns_a_link(self):
1252 def test_get_node_returns_a_link(self):
1254 repository = mock.Mock()
1253 repository = mock.Mock()
1255 repository.alias = 'git'
1254 repository.alias = 'git'
1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1258 get_id_patch = mock.patch.object(
1257 get_id_patch = mock.patch.object(
1259 commit, '_get_id_for_path', return_value=(1, 'link'))
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1260 get_submodule_patch = mock.patch.object(
1259 get_submodule_patch = mock.patch.object(
1261 commit, '_get_submodule_url', return_value=submodule_url)
1260 commit, '_get_submodule_url', return_value=submodule_url)
1262
1261
1263 with get_id_patch, get_submodule_patch as submodule_mock:
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1264 node = commit.get_node('/abcde')
1263 node = commit.get_node('/abcde')
1265
1264
1266 submodule_mock.assert_called_once_with('/abcde')
1265 submodule_mock.assert_called_once_with('/abcde')
1267 assert type(node) == SubModuleNode
1266 assert type(node) == SubModuleNode
1268 assert node.url == submodule_url
1267 assert node.url == submodule_url
1269
1268
1270 def test_get_nodes_returns_links(self):
1269 def test_get_nodes_returns_links(self):
1271 repository = mock.MagicMock()
1270 repository = mock.MagicMock()
1272 repository.alias = 'git'
1271 repository.alias = 'git'
1273 repository._remote.tree_items.return_value = [
1272 repository._remote.tree_items.return_value = [
1274 ('subrepo', 'stat', 1, 'link')
1273 ('subrepo', 'stat', 1, 'link')
1275 ]
1274 ]
1276 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1277 submodule_url = 'https://code.rhodecode.com/dulwich'
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1278 get_id_patch = mock.patch.object(
1277 get_id_patch = mock.patch.object(
1279 commit, '_get_id_for_path', return_value=(1, 'tree'))
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1280 get_submodule_patch = mock.patch.object(
1279 get_submodule_patch = mock.patch.object(
1281 commit, '_get_submodule_url', return_value=submodule_url)
1280 commit, '_get_submodule_url', return_value=submodule_url)
1282
1281
1283 with get_id_patch, get_submodule_patch as submodule_mock:
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1284 nodes = commit.get_nodes('/abcde')
1283 nodes = commit.get_nodes('/abcde')
1285
1284
1286 submodule_mock.assert_called_once_with('/abcde/subrepo')
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1287 assert len(nodes) == 1
1286 assert len(nodes) == 1
1288 assert type(nodes[0]) == SubModuleNode
1287 assert type(nodes[0]) == SubModuleNode
1289 assert nodes[0].url == submodule_url
1288 assert nodes[0].url == submodule_url
@@ -1,1186 +1,1186 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Return a different path to be used for cloning repos.
42 Return a different path to be used for cloning repos.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
50
50
51
51
52 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
53 def repo(request, baseapp):
53 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
55 if request.cls:
56 request.cls.repo = repo
56 request.cls.repo = repo
57 return repo
57 return repo
58
58
59
59
60 class TestMercurialRepository:
60 class TestMercurialRepository:
61
61
62 # pylint: disable=protected-access
62 # pylint: disable=protected-access
63
63
64 def get_clone_repo(self):
64 def get_clone_repo(self):
65 """
65 """
66 Return a clone of the base repo.
66 Return a clone of the base repo.
67 """
67 """
68 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
70 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
71
72 return repo_clone
72 return repo_clone
73
73
74 def get_empty_repo(self):
74 def get_empty_repo(self):
75 """
75 """
76 Return an empty repo.
76 Return an empty repo.
77 """
77 """
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
79
80 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
82 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
84
84
85 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
87 MercurialRepository(u'iShouldFail')
87 MercurialRepository(u'iShouldFail')
88
88
89 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit(u'unicode-commit-id')
91 self.repo.get_commit(u'unicode-commit-id')
92 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
94
94
95 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
96 self.repo.bookmark(u'unicode-bookmark')
96 self.repo.bookmark(u'unicode-bookmark')
97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
98
98
99 def test_unicode_branch(self):
99 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
101 self.repo.branches[u'unicode-branch']
101 self.repo.branches[u'unicode-branch']
102 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
104
104
105 def test_repo_clone(self):
105 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
107 self.fail(
107 self.fail(
108 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
109 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
110 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
111
111
112 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
117 for commit in repo.get_commits():
118 raw_id = commit.raw_id
118 raw_id = commit.raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120
120
121 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
125 src_url=TEST_HG_REPO, update_after_clone=True)
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
127
128 # check if current workdir was updated
128 # check if current workdir was updated
129 assert os.path.isfile(
129 assert os.path.isfile(
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
131
132 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
136 src_url=TEST_HG_REPO, update_after_clone=False)
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
138 assert not os.path.isfile(
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
140
141 def test_commit_ids(self):
141 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
143 # so we can assume they would be available from now on
143 # so we can assume they would be available from now on
144 subset = set([
144 subset = set([
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 '6fff84722075f1607a30f436523403845f84cd9e',
150 '6fff84722075f1607a30f436523403845f84cd9e',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 'be90031137367893f1c406e0a8683010fd115b79',
154 'be90031137367893f1c406e0a8683010fd115b79',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 '84478366594b424af694a6c784cb991a16b87c21',
156 '84478366594b424af694a6c784cb991a16b87c21',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 ])
164 ])
165 assert subset.issubset(set(self.repo.commit_ids))
165 assert subset.issubset(set(self.repo.commit_ids))
166
166
167 # check if we have the proper order of commits
167 # check if we have the proper order of commits
168 org = [
168 org = [
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 '6fff84722075f1607a30f436523403845f84cd9e',
174 '6fff84722075f1607a30f436523403845f84cd9e',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 'be90031137367893f1c406e0a8683010fd115b79',
178 'be90031137367893f1c406e0a8683010fd115b79',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 '84478366594b424af694a6c784cb991a16b87c21',
180 '84478366594b424af694a6c784cb991a16b87c21',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 '536c1a19428381cfea92ac44985304f6a8049569',
193 '536c1a19428381cfea92ac44985304f6a8049569',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 ]
200 ]
201 assert org == self.repo.commit_ids[:31]
201 assert org == self.repo.commit_ids[:31]
202
202
203 def test_iter_slice(self):
203 def test_iter_slice(self):
204 sliced = list(self.repo[:10])
204 sliced = list(self.repo[:10])
205 itered = list(self.repo)[:10]
205 itered = list(self.repo)[:10]
206 assert sliced == itered
206 assert sliced == itered
207
207
208 def test_slicing(self):
208 def test_slicing(self):
209 # 4 1 5 10 95
209 # 4 1 5 10 95
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 (10, 20, 10), (5, 100, 95)]:
211 (10, 20, 10), (5, 100, 95)]:
212 indexes = list(self.repo[sfrom:sto])
212 indexes = list(self.repo[sfrom:sto])
213 assert len(indexes) == size
213 assert len(indexes) == size
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216
216
217 def test_branches(self):
217 def test_branches(self):
218 # TODO: Need more tests here
218 # TODO: Need more tests here
219
219
220 # active branches
220 # active branches
221 assert 'default' in self.repo.branches
221 assert 'default' in self.repo.branches
222 assert 'stable' in self.repo.branches
222 assert 'stable' in self.repo.branches
223
223
224 # closed
224 # closed
225 assert 'git' in self.repo._get_branches(closed=True)
225 assert 'git' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
226 assert 'web' in self.repo._get_branches(closed=True)
227
227
228 for name, id in self.repo.branches.items():
228 for name, id in self.repo.branches.items():
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230
230
231 def test_tip_in_tags(self):
231 def test_tip_in_tags(self):
232 # tip is always a tag
232 # tip is always a tag
233 assert 'tip' in self.repo.tags
233 assert 'tip' in self.repo.tags
234
234
235 def test_tip_commit_in_tags(self):
235 def test_tip_commit_in_tags(self):
236 tip = self.repo.get_commit()
236 tip = self.repo.get_commit()
237 assert self.repo.tags['tip'] == tip.raw_id
237 assert self.repo.tags['tip'] == tip.raw_id
238
238
239 def test_initial_commit(self):
239 def test_initial_commit(self):
240 init_commit = self.repo.get_commit(commit_idx=0)
240 init_commit = self.repo.get_commit(commit_idx=0)
241 init_author = init_commit.author
241 init_author = init_commit.author
242
242
243 assert init_commit.message == 'initial import'
243 assert init_commit.message == 'initial import'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 assert init_author == init_commit.committer
245 assert init_author == init_commit.committer
246 assert sorted(init_commit._file_paths) == sorted([
246 assert sorted(init_commit._file_paths) == sorted([
247 'vcs/__init__.py',
247 'vcs/__init__.py',
248 'vcs/backends/BaseRepository.py',
248 'vcs/backends/BaseRepository.py',
249 'vcs/backends/__init__.py',
249 'vcs/backends/__init__.py',
250 ])
250 ])
251 assert sorted(init_commit._dir_paths) == sorted(
251 assert sorted(init_commit._dir_paths) == sorted(
252 ['', 'vcs', 'vcs/backends'])
252 ['', 'vcs', 'vcs/backends'])
253
253
254 assert init_commit._dir_paths + init_commit._file_paths == \
254 assert init_commit._dir_paths + init_commit._file_paths == \
255 init_commit._paths
255 init_commit._paths
256
256
257 with pytest.raises(NodeDoesNotExistError):
257 with pytest.raises(NodeDoesNotExistError):
258 init_commit.get_node(path='foobar')
258 init_commit.get_node(path='foobar')
259
259
260 node = init_commit.get_node('vcs/')
260 node = init_commit.get_node('vcs/')
261 assert hasattr(node, 'kind')
261 assert hasattr(node, 'kind')
262 assert node.kind == NodeKind.DIR
262 assert node.kind == NodeKind.DIR
263
263
264 node = init_commit.get_node('vcs')
264 node = init_commit.get_node('vcs')
265 assert hasattr(node, 'kind')
265 assert hasattr(node, 'kind')
266 assert node.kind == NodeKind.DIR
266 assert node.kind == NodeKind.DIR
267
267
268 node = init_commit.get_node('vcs/__init__.py')
268 node = init_commit.get_node('vcs/__init__.py')
269 assert hasattr(node, 'kind')
269 assert hasattr(node, 'kind')
270 assert node.kind == NodeKind.FILE
270 assert node.kind == NodeKind.FILE
271
271
272 def test_not_existing_commit(self):
272 def test_not_existing_commit(self):
273 # rawid
273 # rawid
274 with pytest.raises(RepositoryError):
274 with pytest.raises(RepositoryError):
275 self.repo.get_commit('abcd' * 10)
275 self.repo.get_commit('abcd' * 10)
276 # shortid
276 # shortid
277 with pytest.raises(RepositoryError):
277 with pytest.raises(RepositoryError):
278 self.repo.get_commit('erro' * 4)
278 self.repo.get_commit('erro' * 4)
279 # numeric
279 # numeric
280 with pytest.raises(RepositoryError):
280 with pytest.raises(RepositoryError):
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282
282
283 # Small chance we ever get to this one
283 # Small chance we ever get to this one
284 idx = pow(2, 30)
284 idx = pow(2, 30)
285 with pytest.raises(RepositoryError):
285 with pytest.raises(RepositoryError):
286 self.repo.get_commit(commit_idx=idx)
286 self.repo.get_commit(commit_idx=idx)
287
287
288 def test_commit10(self):
288 def test_commit10(self):
289 commit10 = self.repo.get_commit(commit_idx=10)
289 commit10 = self.repo.get_commit(commit_idx=10)
290 README = """===
290 README = """===
291 VCS
291 VCS
292 ===
292 ===
293
293
294 Various Version Control System management abstraction layer for Python.
294 Various Version Control System management abstraction layer for Python.
295
295
296 Introduction
296 Introduction
297 ------------
297 ------------
298
298
299 TODO: To be written...
299 TODO: To be written...
300
300
301 """
301 """
302 node = commit10.get_node('README.rst')
302 node = commit10.get_node('README.rst')
303 assert node.kind == NodeKind.FILE
303 assert node.kind == NodeKind.FILE
304 assert node.content == README
304 assert node.content == README
305
305
306 def test_local_clone(self):
306 def test_local_clone(self):
307 clone_path = next(REPO_PATH_GENERATOR)
307 clone_path = next(REPO_PATH_GENERATOR)
308 self.repo._local_clone(clone_path)
308 self.repo._local_clone(clone_path)
309 repo_clone = MercurialRepository(clone_path)
309 repo_clone = MercurialRepository(clone_path)
310
310
311 assert self.repo.commit_ids == repo_clone.commit_ids
311 assert self.repo.commit_ids == repo_clone.commit_ids
312
312
313 def test_local_clone_fails_if_target_exists(self):
313 def test_local_clone_fails_if_target_exists(self):
314 with pytest.raises(RepositoryError):
314 with pytest.raises(RepositoryError):
315 self.repo._local_clone(self.repo.path)
315 self.repo._local_clone(self.repo.path)
316
316
317 def test_update(self):
317 def test_update(self):
318 repo_clone = self.get_clone_repo()
318 repo_clone = self.get_clone_repo()
319 branches = repo_clone.branches
319 branches = repo_clone.branches
320
320
321 repo_clone._update('default')
321 repo_clone._update('default')
322 assert branches['default'] == repo_clone._identify()
322 assert branches['default'] == repo_clone._identify()
323 repo_clone._update('stable')
323 repo_clone._update('stable')
324 assert branches['stable'] == repo_clone._identify()
324 assert branches['stable'] == repo_clone._identify()
325
325
326 def test_local_pull_branch(self):
326 def test_local_pull_branch(self):
327 target_repo = self.get_empty_repo()
327 target_repo = self.get_empty_repo()
328 source_repo = self.get_clone_repo()
328 source_repo = self.get_clone_repo()
329
329
330 default = Reference(
330 default = Reference(
331 'branch', 'default', source_repo.branches['default'])
331 'branch', 'default', source_repo.branches['default'])
332 target_repo._local_pull(source_repo.path, default)
332 target_repo._local_pull(source_repo.path, default)
333 target_repo = MercurialRepository(target_repo.path)
333 target_repo = MercurialRepository(target_repo.path)
334 assert (target_repo.branches['default'] ==
334 assert (target_repo.branches['default'] ==
335 source_repo.branches['default'])
335 source_repo.branches['default'])
336
336
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 target_repo._local_pull(source_repo.path, stable)
338 target_repo._local_pull(source_repo.path, stable)
339 target_repo = MercurialRepository(target_repo.path)
339 target_repo = MercurialRepository(target_repo.path)
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341
341
342 def test_local_pull_bookmark(self):
342 def test_local_pull_bookmark(self):
343 target_repo = self.get_empty_repo()
343 target_repo = self.get_empty_repo()
344 source_repo = self.get_clone_repo()
344 source_repo = self.get_clone_repo()
345
345
346 commits = list(source_repo.get_commits(branch_name='default'))
346 commits = list(source_repo.get_commits(branch_name='default'))
347 foo1_id = commits[-5].raw_id
347 foo1_id = commits[-5].raw_id
348 foo1 = Reference('book', 'foo1', foo1_id)
348 foo1 = Reference('book', 'foo1', foo1_id)
349 source_repo._update(foo1_id)
349 source_repo._update(foo1_id)
350 source_repo.bookmark('foo1')
350 source_repo.bookmark('foo1')
351
351
352 foo2_id = commits[-3].raw_id
352 foo2_id = commits[-3].raw_id
353 foo2 = Reference('book', 'foo2', foo2_id)
353 foo2 = Reference('book', 'foo2', foo2_id)
354 source_repo._update(foo2_id)
354 source_repo._update(foo2_id)
355 source_repo.bookmark('foo2')
355 source_repo.bookmark('foo2')
356
356
357 target_repo._local_pull(source_repo.path, foo1)
357 target_repo._local_pull(source_repo.path, foo1)
358 target_repo = MercurialRepository(target_repo.path)
358 target_repo = MercurialRepository(target_repo.path)
359 assert target_repo.branches['default'] == commits[-5].raw_id
359 assert target_repo.branches['default'] == commits[-5].raw_id
360
360
361 target_repo._local_pull(source_repo.path, foo2)
361 target_repo._local_pull(source_repo.path, foo2)
362 target_repo = MercurialRepository(target_repo.path)
362 target_repo = MercurialRepository(target_repo.path)
363 assert target_repo.branches['default'] == commits[-3].raw_id
363 assert target_repo.branches['default'] == commits[-3].raw_id
364
364
365 def test_local_pull_commit(self):
365 def test_local_pull_commit(self):
366 target_repo = self.get_empty_repo()
366 target_repo = self.get_empty_repo()
367 source_repo = self.get_clone_repo()
367 source_repo = self.get_clone_repo()
368
368
369 commits = list(source_repo.get_commits(branch_name='default'))
369 commits = list(source_repo.get_commits(branch_name='default'))
370 commit_id = commits[-5].raw_id
370 commit_id = commits[-5].raw_id
371 commit = Reference('rev', commit_id, commit_id)
371 commit = Reference('rev', commit_id, commit_id)
372 target_repo._local_pull(source_repo.path, commit)
372 target_repo._local_pull(source_repo.path, commit)
373 target_repo = MercurialRepository(target_repo.path)
373 target_repo = MercurialRepository(target_repo.path)
374 assert target_repo.branches['default'] == commit_id
374 assert target_repo.branches['default'] == commit_id
375
375
376 commit_id = commits[-3].raw_id
376 commit_id = commits[-3].raw_id
377 commit = Reference('rev', commit_id, commit_id)
377 commit = Reference('rev', commit_id, commit_id)
378 target_repo._local_pull(source_repo.path, commit)
378 target_repo._local_pull(source_repo.path, commit)
379 target_repo = MercurialRepository(target_repo.path)
379 target_repo = MercurialRepository(target_repo.path)
380 assert target_repo.branches['default'] == commit_id
380 assert target_repo.branches['default'] == commit_id
381
381
382 def test_local_pull_from_same_repo(self):
382 def test_local_pull_from_same_repo(self):
383 reference = Reference('branch', 'default', None)
383 reference = Reference('branch', 'default', None)
384 with pytest.raises(ValueError):
384 with pytest.raises(ValueError):
385 self.repo._local_pull(self.repo.path, reference)
385 self.repo._local_pull(self.repo.path, reference)
386
386
387 def test_validate_pull_reference_raises_on_missing_reference(
387 def test_validate_pull_reference_raises_on_missing_reference(
388 self, vcsbackend_hg):
388 self, vcsbackend_hg):
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 reference = Reference(
390 reference = Reference(
391 'book', 'invalid_reference', 'a' * 40)
391 'book', 'invalid_reference', 'a' * 40)
392
392
393 with pytest.raises(CommitDoesNotExistError):
393 with pytest.raises(CommitDoesNotExistError):
394 target_repo._validate_pull_reference(reference)
394 target_repo._validate_pull_reference(reference)
395
395
396 def test_heads(self):
396 def test_heads(self):
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398
398
399 def test_ancestor(self):
399 def test_ancestor(self):
400 commits = [
400 commits = [
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404
404
405 def test_local_push(self):
405 def test_local_push(self):
406 target_repo = self.get_empty_repo()
406 target_repo = self.get_empty_repo()
407
407
408 revisions = list(self.repo.get_commits(branch_name='default'))
408 revisions = list(self.repo.get_commits(branch_name='default'))
409 revision = revisions[-5].raw_id
409 revision = revisions[-5].raw_id
410 self.repo._local_push(revision, target_repo.path)
410 self.repo._local_push(revision, target_repo.path)
411
411
412 target_repo = MercurialRepository(target_repo.path)
412 target_repo = MercurialRepository(target_repo.path)
413
413
414 assert target_repo.branches['default'] == revision
414 assert target_repo.branches['default'] == revision
415
415
416 def test_hooks_can_be_enabled_for_local_push(self):
416 def test_hooks_can_be_enabled_for_local_push(self):
417 revision = 'deadbeef'
417 revision = 'deadbeef'
418 repo_path = 'test_group/test_repo'
418 repo_path = 'test_group/test_repo'
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 remote_mock.push.assert_called_once_with(
421 remote_mock.push.assert_called_once_with(
422 [revision], repo_path, hooks=True, push_branches=False)
422 [revision], repo_path, hooks=True, push_branches=False)
423
423
424 def test_local_merge(self, vcsbackend_hg):
424 def test_local_merge(self, vcsbackend_hg):
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 target_repo = MercurialRepository(target_repo.path)
428 target_repo = MercurialRepository(target_repo.path)
429 target_rev = target_repo.branches['default']
429 target_rev = target_repo.branches['default']
430 target_ref = Reference(
430 target_ref = Reference(
431 type='branch', name='default', commit_id=target_rev)
431 type='branch', name='default', commit_id=target_rev)
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 source_repo = MercurialRepository(source_repo.path)
433 source_repo = MercurialRepository(source_repo.path)
434 source_rev = source_repo.branches['default']
434 source_rev = source_repo.branches['default']
435 source_ref = Reference(
435 source_ref = Reference(
436 type='branch', name='default', commit_id=source_rev)
436 type='branch', name='default', commit_id=source_rev)
437
437
438 target_repo._local_pull(source_repo.path, source_ref)
438 target_repo._local_pull(source_repo.path, source_ref)
439
439
440 merge_message = 'Merge message\n\nDescription:...'
440 merge_message = 'Merge message\n\nDescription:...'
441 user_name = 'Albert Einstein'
441 user_name = 'Albert Einstein'
442 user_email = 'albert@einstein.com'
442 user_email = 'albert@einstein.com'
443 merge_commit_id, needs_push = target_repo._local_merge(
443 merge_commit_id, needs_push = target_repo._local_merge(
444 target_ref, merge_message, user_name, user_email, source_ref)
444 target_ref, merge_message, user_name, user_email, source_ref)
445 assert needs_push
445 assert needs_push
446
446
447 target_repo = MercurialRepository(target_repo.path)
447 target_repo = MercurialRepository(target_repo.path)
448 assert target_repo.commit_ids[-3] == target_rev
448 assert target_repo.commit_ids[-3] == target_rev
449 assert target_repo.commit_ids[-2] == source_rev
449 assert target_repo.commit_ids[-2] == source_rev
450 last_commit = target_repo.get_commit(merge_commit_id)
450 last_commit = target_repo.get_commit(merge_commit_id)
451 assert last_commit.message.strip() == merge_message
451 assert last_commit.message.strip() == merge_message
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453
453
454 assert not os.path.exists(
454 assert not os.path.exists(
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456
456
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 target_rev = target_repo.branches['default']
460 target_rev = target_repo.branches['default']
461 target_ref = Reference(
461 target_ref = Reference(
462 type='branch', name='default', commit_id=target_rev)
462 type='branch', name='default', commit_id=target_rev)
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 source_repo = MercurialRepository(source_repo.path)
464 source_repo = MercurialRepository(source_repo.path)
465 source_rev = source_repo.branches['default']
465 source_rev = source_repo.branches['default']
466 source_ref = Reference(
466 source_ref = Reference(
467 type='branch', name='default', commit_id=source_rev)
467 type='branch', name='default', commit_id=source_rev)
468
468
469 target_repo._local_pull(source_repo.path, source_ref)
469 target_repo._local_pull(source_repo.path, source_ref)
470
470
471 merge_message = 'Merge message\n\nDescription:...'
471 merge_message = 'Merge message\n\nDescription:...'
472 user_name = 'Albert Einstein'
472 user_name = 'Albert Einstein'
473 user_email = 'albert@einstein.com'
473 user_email = 'albert@einstein.com'
474 merge_commit_id, needs_push = target_repo._local_merge(
474 merge_commit_id, needs_push = target_repo._local_merge(
475 target_ref, merge_message, user_name, user_email, source_ref)
475 target_ref, merge_message, user_name, user_email, source_ref)
476 assert merge_commit_id == source_rev
476 assert merge_commit_id == source_rev
477 assert needs_push
477 assert needs_push
478
478
479 target_repo = MercurialRepository(target_repo.path)
479 target_repo = MercurialRepository(target_repo.path)
480 assert target_repo.commit_ids[-2] == target_rev
480 assert target_repo.commit_ids[-2] == target_rev
481 assert target_repo.commit_ids[-1] == source_rev
481 assert target_repo.commit_ids[-1] == source_rev
482
482
483 assert not os.path.exists(
483 assert not os.path.exists(
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485
485
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_rev = target_repo.branches['default']
488 target_rev = target_repo.branches['default']
489 target_ref = Reference(
489 target_ref = Reference(
490 type='branch', name='default', commit_id=target_rev)
490 type='branch', name='default', commit_id=target_rev)
491
491
492 merge_message = 'Merge message\n\nDescription:...'
492 merge_message = 'Merge message\n\nDescription:...'
493 user_name = 'Albert Einstein'
493 user_name = 'Albert Einstein'
494 user_email = 'albert@einstein.com'
494 user_email = 'albert@einstein.com'
495 merge_commit_id, needs_push = target_repo._local_merge(
495 merge_commit_id, needs_push = target_repo._local_merge(
496 target_ref, merge_message, user_name, user_email, target_ref)
496 target_ref, merge_message, user_name, user_email, target_ref)
497 assert merge_commit_id == target_rev
497 assert merge_commit_id == target_rev
498 assert not needs_push
498 assert not needs_push
499
499
500 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
501 assert target_repo.commit_ids[-1] == target_rev
501 assert target_repo.commit_ids[-1] == target_rev
502
502
503 assert not os.path.exists(
503 assert not os.path.exists(
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505
505
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 target_repo = MercurialRepository(target_repo.path)
510 target_repo = MercurialRepository(target_repo.path)
511 target_rev = target_repo.branches['default']
511 target_rev = target_repo.branches['default']
512 target_ref = Reference(
512 target_ref = Reference(
513 type='branch', name='default', commit_id=target_rev)
513 type='branch', name='default', commit_id=target_rev)
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 source_repo = MercurialRepository(source_repo.path)
515 source_repo = MercurialRepository(source_repo.path)
516 source_rev = source_repo.branches['default']
516 source_rev = source_repo.branches['default']
517 source_ref = Reference(
517 source_ref = Reference(
518 type='branch', name='default', commit_id=source_rev)
518 type='branch', name='default', commit_id=source_rev)
519
519
520 target_repo._local_pull(source_repo.path, source_ref)
520 target_repo._local_pull(source_repo.path, source_ref)
521 with pytest.raises(RepositoryError):
521 with pytest.raises(RepositoryError):
522 target_repo._local_merge(
522 target_repo._local_merge(
523 target_ref, 'merge_message', 'user name', 'user@name.com',
523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 source_ref)
524 source_ref)
525
525
526 # Check we are not left in an intermediate merge state
526 # Check we are not left in an intermediate merge state
527 assert not os.path.exists(
527 assert not os.path.exists(
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529
529
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 commits = [
531 commits = [
532 {'message': 'a'},
532 {'message': 'a'},
533 {'message': 'b', 'branch': 'b'},
533 {'message': 'b', 'branch': 'b'},
534 ]
534 ]
535 repo = backend_hg.create_repo(commits)
535 repo = backend_hg.create_repo(commits)
536 commit_ids = backend_hg.commit_ids
536 commit_ids = backend_hg.commit_ids
537 target_ref = Reference(
537 target_ref = Reference(
538 type='branch', name='default', commit_id=commit_ids['a'])
538 type='branch', name='default', commit_id=commit_ids['a'])
539 source_ref = Reference(
539 source_ref = Reference(
540 type='branch', name='b', commit_id=commit_ids['b'])
540 type='branch', name='b', commit_id=commit_ids['b'])
541 merge_message = 'Merge message\n\nDescription:...'
541 merge_message = 'Merge message\n\nDescription:...'
542 user_name = 'Albert Einstein'
542 user_name = 'Albert Einstein'
543 user_email = 'albert@einstein.com'
543 user_email = 'albert@einstein.com'
544 vcs_repo = repo.scm_instance()
544 vcs_repo = repo.scm_instance()
545 merge_commit_id, needs_push = vcs_repo._local_merge(
545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 target_ref, merge_message, user_name, user_email, source_ref)
546 target_ref, merge_message, user_name, user_email, source_ref)
547 assert merge_commit_id != source_ref.commit_id
547 assert merge_commit_id != source_ref.commit_id
548 assert needs_push is True
548 assert needs_push is True
549 commit = vcs_repo.get_commit(merge_commit_id)
549 commit = vcs_repo.get_commit(merge_commit_id)
550 assert commit.merge is True
550 assert commit.merge is True
551 assert commit.message == merge_message
551 assert commit.message == merge_message
552
552
553 def test_maybe_prepare_merge_workspace(self):
553 def test_maybe_prepare_merge_workspace(self):
554 workspace = self.repo._maybe_prepare_merge_workspace(
554 workspace = self.repo._maybe_prepare_merge_workspace(
555 1, 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
556
556
557 assert os.path.isdir(workspace)
557 assert os.path.isdir(workspace)
558 workspace_repo = MercurialRepository(workspace)
558 workspace_repo = MercurialRepository(workspace)
559 assert workspace_repo.branches == self.repo.branches
559 assert workspace_repo.branches == self.repo.branches
560
560
561 # Calling it a second time should also succeed
561 # Calling it a second time should also succeed
562 workspace = self.repo._maybe_prepare_merge_workspace(
562 workspace = self.repo._maybe_prepare_merge_workspace(
563 1, 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
564 assert os.path.isdir(workspace)
564 assert os.path.isdir(workspace)
565
565
566 def test_cleanup_merge_workspace(self):
566 def test_cleanup_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 1, 'pr3', 'unused', 'unused2')
568 1, 'pr3', 'unused', 'unused2')
569
569
570 assert os.path.isdir(workspace)
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571 self.repo.cleanup_merge_workspace(1, 'pr3')
572
572
573 assert not os.path.exists(workspace)
573 assert not os.path.exists(workspace)
574
574
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 # No assert: because in case of an inexistent workspace this function
576 # No assert: because in case of an inexistent workspace this function
577 # should still succeed.
577 # should still succeed.
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
579
579
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 imc = source_repo.in_memory_commit
585 imc = source_repo.in_memory_commit
586 imc.add(FileNode('file_x', content=source_repo.name))
586 imc.add(FileNode('file_x', content=source_repo.name))
587 imc.commit(
587 imc.commit(
588 message=u'Automatic commit from repo merge test',
588 message=u'Automatic commit from repo merge test',
589 author=u'Automatic')
589 author=u'Automatic')
590 target_commit = target_repo.get_commit()
590 target_commit = target_repo.get_commit()
591 source_commit = source_repo.get_commit()
591 source_commit = source_repo.get_commit()
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 bookmark_name = 'bookmark'
593 bookmark_name = 'bookmark'
594 target_repo._update(default_branch)
594 target_repo._update(default_branch)
595 target_repo.bookmark(bookmark_name)
595 target_repo.bookmark(bookmark_name)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 workspace_id = 'test-merge'
598 workspace_id = 'test-merge'
599 repo_id = repo_id_generator(target_repo.path)
599 repo_id = repo_id_generator(target_repo.path)
600 merge_response = target_repo.merge(
600 merge_response = target_repo.merge(
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 'test user', 'test@rhodecode.com', 'merge message 1',
602 'test user', 'test@rhodecode.com', 'merge message 1',
603 dry_run=False)
603 dry_run=False)
604 expected_merge_response = MergeResponse(
604 expected_merge_response = MergeResponse(
605 True, True, merge_response.merge_ref,
605 True, True, merge_response.merge_ref,
606 MergeFailureReason.NONE)
606 MergeFailureReason.NONE)
607 assert merge_response == expected_merge_response
607 assert merge_response == expected_merge_response
608
608
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 target_repo.path)
610 target_repo.path)
611 target_commits = list(target_repo.get_commits())
611 target_commits = list(target_repo.get_commits())
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 assert source_ref.commit_id in commit_ids
613 assert source_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
614 assert target_ref.commit_id in commit_ids
615
615
616 merge_commit = target_commits[-1]
616 merge_commit = target_commits[-1]
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 assert merge_commit.message.strip() == 'merge message 1'
618 assert merge_commit.message.strip() == 'merge message 1'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620
620
621 # Check the bookmark was updated in the target repo
621 # Check the bookmark was updated in the target repo
622 assert (
622 assert (
623 target_repo.bookmarks[bookmark_name] ==
623 target_repo.bookmarks[bookmark_name] ==
624 merge_response.merge_ref.commit_id)
624 merge_response.merge_ref.commit_id)
625
625
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 imc = source_repo.in_memory_commit
629 imc = source_repo.in_memory_commit
630 imc.add(FileNode('file_x', content=source_repo.name))
630 imc.add(FileNode('file_x', content=source_repo.name))
631 imc.commit(
631 imc.commit(
632 message=u'Automatic commit from repo merge test',
632 message=u'Automatic commit from repo merge test',
633 author=u'Automatic')
633 author=u'Automatic')
634 target_commit = target_repo.get_commit()
634 target_commit = target_repo.get_commit()
635 source_commit = source_repo.get_commit()
635 source_commit = source_repo.get_commit()
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 bookmark_name = 'bookmark'
637 bookmark_name = 'bookmark'
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 source_repo._update(default_branch)
639 source_repo._update(default_branch)
640 source_repo.bookmark(bookmark_name)
640 source_repo.bookmark(bookmark_name)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 workspace_id = 'test-merge'
642 workspace_id = 'test-merge'
643 repo_id = repo_id_generator(target_repo.path)
643 repo_id = repo_id_generator(target_repo.path)
644 merge_response = target_repo.merge(
644 merge_response = target_repo.merge(
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 'test user', 'test@rhodecode.com', 'merge message 1',
646 'test user', 'test@rhodecode.com', 'merge message 1',
647 dry_run=False)
647 dry_run=False)
648 expected_merge_response = MergeResponse(
648 expected_merge_response = MergeResponse(
649 True, True, merge_response.merge_ref,
649 True, True, merge_response.merge_ref,
650 MergeFailureReason.NONE)
650 MergeFailureReason.NONE)
651 assert merge_response == expected_merge_response
651 assert merge_response == expected_merge_response
652
652
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 target_repo.path)
654 target_repo.path)
655 target_commits = list(target_repo.get_commits())
655 target_commits = list(target_repo.get_commits())
656 commit_ids = [c.raw_id for c in target_commits]
656 commit_ids = [c.raw_id for c in target_commits]
657 assert source_ref.commit_id == commit_ids[-1]
657 assert source_ref.commit_id == commit_ids[-1]
658 assert target_ref.commit_id == commit_ids[-2]
658 assert target_ref.commit_id == commit_ids[-2]
659
659
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665
665
666 # add an extra head to the target repo
666 # add an extra head to the target repo
667 imc = target_repo.in_memory_commit
667 imc = target_repo.in_memory_commit
668 imc.add(FileNode('file_x', content='foo'))
668 imc.add(FileNode('file_x', content='foo'))
669 commits = list(target_repo.get_commits())
669 commits = list(target_repo.get_commits())
670 imc.commit(
670 imc.commit(
671 message=u'Automatic commit from repo merge test',
671 message=u'Automatic commit from repo merge test',
672 author=u'Automatic', parents=commits[0:1])
672 author=u'Automatic', parents=commits[0:1])
673
673
674 target_commit = target_repo.get_commit()
674 target_commit = target_repo.get_commit()
675 source_commit = source_repo.get_commit()
675 source_commit = source_repo.get_commit()
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 target_repo._update(default_branch)
677 target_repo._update(default_branch)
678
678
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 workspace_id = 'test-merge'
681 workspace_id = 'test-merge'
682
682
683 assert len(target_repo._heads(branch='default')) == 2
683 assert len(target_repo._heads(branch='default')) == 2
684 expected_merge_response = MergeResponse(
684 expected_merge_response = MergeResponse(
685 False, False, None,
685 False, False, None,
686 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
686 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
687 repo_id = repo_id_generator(target_repo.path)
687 repo_id = repo_id_generator(target_repo.path)
688 merge_response = target_repo.merge(
688 merge_response = target_repo.merge(
689 repo_id, workspace_id, target_ref, source_repo, source_ref,
689 repo_id, workspace_id, target_ref, source_repo, source_ref,
690 'test user', 'test@rhodecode.com', 'merge message 1',
690 'test user', 'test@rhodecode.com', 'merge message 1',
691 dry_run=False)
691 dry_run=False)
692 assert merge_response == expected_merge_response
692 assert merge_response == expected_merge_response
693
693
694 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
694 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
695 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
695 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
696 source_repo = vcsbackend_hg.clone_repo(target_repo)
696 source_repo = vcsbackend_hg.clone_repo(target_repo)
697 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
697 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
698 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
698 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
699 imc = source_repo.in_memory_commit
699 imc = source_repo.in_memory_commit
700 imc.add(FileNode('file_x', content=source_repo.name))
700 imc.add(FileNode('file_x', content=source_repo.name))
701 imc.commit(
701 imc.commit(
702 message=u'Automatic commit from repo merge test',
702 message=u'Automatic commit from repo merge test',
703 author=u'Automatic')
703 author=u'Automatic')
704 target_commit = target_repo.get_commit()
704 target_commit = target_repo.get_commit()
705 source_commit = source_repo.get_commit()
705 source_commit = source_repo.get_commit()
706
706
707 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
707 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
708
708
709 default_branch = target_repo.DEFAULT_BRANCH_NAME
709 default_branch = target_repo.DEFAULT_BRANCH_NAME
710 bookmark_name = 'bookmark'
710 bookmark_name = 'bookmark'
711 source_repo._update(default_branch)
711 source_repo._update(default_branch)
712 source_repo.bookmark(bookmark_name)
712 source_repo.bookmark(bookmark_name)
713
713
714 target_ref = Reference('branch', default_branch, target_commit.raw_id)
714 target_ref = Reference('branch', default_branch, target_commit.raw_id)
715 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
715 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
716 repo_id = repo_id_generator(target_repo.path)
716 repo_id = repo_id_generator(target_repo.path)
717 workspace_id = 'test-merge'
717 workspace_id = 'test-merge'
718
718
719 merge_response = target_repo.merge(
719 merge_response = target_repo.merge(
720 repo_id, workspace_id, target_ref, source_repo, source_ref,
720 repo_id, workspace_id, target_ref, source_repo, source_ref,
721 'test user', 'test@rhodecode.com', 'merge message 1',
721 'test user', 'test@rhodecode.com', 'merge message 1',
722 dry_run=False, use_rebase=True)
722 dry_run=False, use_rebase=True)
723
723
724 expected_merge_response = MergeResponse(
724 expected_merge_response = MergeResponse(
725 True, True, merge_response.merge_ref,
725 True, True, merge_response.merge_ref,
726 MergeFailureReason.NONE)
726 MergeFailureReason.NONE)
727 assert merge_response == expected_merge_response
727 assert merge_response == expected_merge_response
728
728
729 target_repo = backends.get_backend(vcsbackend_hg.alias)(
729 target_repo = backends.get_backend(vcsbackend_hg.alias)(
730 target_repo.path)
730 target_repo.path)
731 last_commit = target_repo.get_commit()
731 last_commit = target_repo.get_commit()
732 assert last_commit.message == source_commit.message
732 assert last_commit.message == source_commit.message
733 assert last_commit.author == source_commit.author
733 assert last_commit.author == source_commit.author
734 # This checks that we effectively did a rebase
734 # This checks that we effectively did a rebase
735 assert last_commit.raw_id != source_commit.raw_id
735 assert last_commit.raw_id != source_commit.raw_id
736
736
737 # Check the target has only 4 commits: 2 were already in target and
737 # Check the target has only 4 commits: 2 were already in target and
738 # only two should have been added
738 # only two should have been added
739 assert len(target_repo.commit_ids) == 2 + 2
739 assert len(target_repo.commit_ids) == 2 + 2
740
740
741
741
742 class TestGetShadowInstance(object):
742 class TestGetShadowInstance(object):
743
743
744 @pytest.fixture
744 @pytest.fixture
745 def repo(self, vcsbackend_hg, monkeypatch):
745 def repo(self, vcsbackend_hg, monkeypatch):
746 repo = vcsbackend_hg.repo
746 repo = vcsbackend_hg.repo
747 monkeypatch.setattr(repo, 'config', mock.Mock())
747 monkeypatch.setattr(repo, 'config', mock.Mock())
748 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
748 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
749 return repo
749 return repo
750
750
751 def test_passes_config(self, repo):
751 def test_passes_config(self, repo):
752 shadow = repo._get_shadow_instance(repo.path)
752 shadow = repo._get_shadow_instance(repo.path)
753 assert shadow.config == repo.config.copy()
753 assert shadow.config == repo.config.copy()
754
754
755 def test_disables_hooks(self, repo):
755 def test_disables_hooks(self, repo):
756 shadow = repo._get_shadow_instance(repo.path)
756 shadow = repo._get_shadow_instance(repo.path)
757 shadow.config.clear_section.assert_called_once_with('hooks')
757 shadow.config.clear_section.assert_called_once_with('hooks')
758
758
759 def test_allows_to_keep_hooks(self, repo):
759 def test_allows_to_keep_hooks(self, repo):
760 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
760 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
761 assert not shadow.config.clear_section.called
761 assert not shadow.config.clear_section.called
762
762
763
763
764 class TestMercurialCommit(object):
764 class TestMercurialCommit(object):
765
765
766 def _test_equality(self, commit):
766 def _test_equality(self, commit):
767 idx = commit.idx
767 idx = commit.idx
768 assert commit == self.repo.get_commit(commit_idx=idx)
768 assert commit == self.repo.get_commit(commit_idx=idx)
769
769
770 def test_equality(self):
770 def test_equality(self):
771 indexes = [0, 10, 20]
771 indexes = [0, 10, 20]
772 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
772 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
773 for commit in commits:
773 for commit in commits:
774 self._test_equality(commit)
774 self._test_equality(commit)
775
775
776 def test_default_commit(self):
776 def test_default_commit(self):
777 tip = self.repo.get_commit('tip')
777 tip = self.repo.get_commit('tip')
778 assert tip == self.repo.get_commit()
778 assert tip == self.repo.get_commit()
779 assert tip == self.repo.get_commit(commit_id=None)
779 assert tip == self.repo.get_commit(commit_id=None)
780 assert tip == self.repo.get_commit(commit_idx=None)
780 assert tip == self.repo.get_commit(commit_idx=None)
781 assert tip == list(self.repo[-1:])[0]
781 assert tip == list(self.repo[-1:])[0]
782
782
783 def test_root_node(self):
783 def test_root_node(self):
784 tip = self.repo.get_commit('tip')
784 tip = self.repo.get_commit('tip')
785 assert tip.root is tip.get_node('')
785 assert tip.root is tip.get_node('')
786
786
787 def test_lazy_fetch(self):
787 def test_lazy_fetch(self):
788 """
788 """
789 Test if commit's nodes expands and are cached as we walk through
789 Test if commit's nodes expands and are cached as we walk through
790 the commit. This test is somewhat hard to write as order of tests
790 the commit. This test is somewhat hard to write as order of tests
791 is a key here. Written by running command after command in a shell.
791 is a key here. Written by running command after command in a shell.
792 """
792 """
793 commit = self.repo.get_commit(commit_idx=45)
793 commit = self.repo.get_commit(commit_idx=45)
794 assert len(commit.nodes) == 0
794 assert len(commit.nodes) == 0
795 root = commit.root
795 root = commit.root
796 assert len(commit.nodes) == 1
796 assert len(commit.nodes) == 1
797 assert len(root.nodes) == 8
797 assert len(root.nodes) == 8
798 # accessing root.nodes updates commit.nodes
798 # accessing root.nodes updates commit.nodes
799 assert len(commit.nodes) == 9
799 assert len(commit.nodes) == 9
800
800
801 docs = root.get_node('docs')
801 docs = root.get_node('docs')
802 # we haven't yet accessed anything new as docs dir was already cached
802 # we haven't yet accessed anything new as docs dir was already cached
803 assert len(commit.nodes) == 9
803 assert len(commit.nodes) == 9
804 assert len(docs.nodes) == 8
804 assert len(docs.nodes) == 8
805 # accessing docs.nodes updates commit.nodes
805 # accessing docs.nodes updates commit.nodes
806 assert len(commit.nodes) == 17
806 assert len(commit.nodes) == 17
807
807
808 assert docs is commit.get_node('docs')
808 assert docs is commit.get_node('docs')
809 assert docs is root.nodes[0]
809 assert docs is root.nodes[0]
810 assert docs is root.dirs[0]
810 assert docs is root.dirs[0]
811 assert docs is commit.get_node('docs')
811 assert docs is commit.get_node('docs')
812
812
813 def test_nodes_with_commit(self):
813 def test_nodes_with_commit(self):
814 commit = self.repo.get_commit(commit_idx=45)
814 commit = self.repo.get_commit(commit_idx=45)
815 root = commit.root
815 root = commit.root
816 docs = root.get_node('docs')
816 docs = root.get_node('docs')
817 assert docs is commit.get_node('docs')
817 assert docs is commit.get_node('docs')
818 api = docs.get_node('api')
818 api = docs.get_node('api')
819 assert api is commit.get_node('docs/api')
819 assert api is commit.get_node('docs/api')
820 index = api.get_node('index.rst')
820 index = api.get_node('index.rst')
821 assert index is commit.get_node('docs/api/index.rst')
821 assert index is commit.get_node('docs/api/index.rst')
822 assert index is commit.get_node(
822 assert index is commit.get_node(
823 'docs').get_node('api').get_node('index.rst')
823 'docs').get_node('api').get_node('index.rst')
824
824
825 def test_branch_and_tags(self):
825 def test_branch_and_tags(self):
826 commit0 = self.repo.get_commit(commit_idx=0)
826 commit0 = self.repo.get_commit(commit_idx=0)
827 assert commit0.branch == 'default'
827 assert commit0.branch == 'default'
828 assert commit0.tags == []
828 assert commit0.tags == []
829
829
830 commit10 = self.repo.get_commit(commit_idx=10)
830 commit10 = self.repo.get_commit(commit_idx=10)
831 assert commit10.branch == 'default'
831 assert commit10.branch == 'default'
832 assert commit10.tags == []
832 assert commit10.tags == []
833
833
834 commit44 = self.repo.get_commit(commit_idx=44)
834 commit44 = self.repo.get_commit(commit_idx=44)
835 assert commit44.branch == 'web'
835 assert commit44.branch == 'web'
836
836
837 tip = self.repo.get_commit('tip')
837 tip = self.repo.get_commit('tip')
838 assert 'tip' in tip.tags
838 assert 'tip' in tip.tags
839
839
840 def test_bookmarks(self):
840 def test_bookmarks(self):
841 commit0 = self.repo.get_commit(commit_idx=0)
841 commit0 = self.repo.get_commit(commit_idx=0)
842 assert commit0.bookmarks == []
842 assert commit0.bookmarks == []
843
843
844 def _test_file_size(self, idx, path, size):
844 def _test_file_size(self, idx, path, size):
845 node = self.repo.get_commit(commit_idx=idx).get_node(path)
845 node = self.repo.get_commit(commit_idx=idx).get_node(path)
846 assert node.is_file()
846 assert node.is_file()
847 assert node.size == size
847 assert node.size == size
848
848
849 def test_file_size(self):
849 def test_file_size(self):
850 to_check = (
850 to_check = (
851 (10, 'setup.py', 1068),
851 (10, 'setup.py', 1068),
852 (20, 'setup.py', 1106),
852 (20, 'setup.py', 1106),
853 (60, 'setup.py', 1074),
853 (60, 'setup.py', 1074),
854
854
855 (10, 'vcs/backends/base.py', 2921),
855 (10, 'vcs/backends/base.py', 2921),
856 (20, 'vcs/backends/base.py', 3936),
856 (20, 'vcs/backends/base.py', 3936),
857 (60, 'vcs/backends/base.py', 6189),
857 (60, 'vcs/backends/base.py', 6189),
858 )
858 )
859 for idx, path, size in to_check:
859 for idx, path, size in to_check:
860 self._test_file_size(idx, path, size)
860 self._test_file_size(idx, path, size)
861
861
862 def test_file_history_from_commits(self):
862 def test_file_history_from_commits(self):
863 node = self.repo[10].get_node('setup.py')
863 node = self.repo[10].get_node('setup.py')
864 commit_ids = [commit.raw_id for commit in node.history]
864 commit_ids = [commit.raw_id for commit in node.history]
865 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
865 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
866
866
867 node = self.repo[20].get_node('setup.py')
867 node = self.repo[20].get_node('setup.py')
868 node_ids = [commit.raw_id for commit in node.history]
868 node_ids = [commit.raw_id for commit in node.history]
869 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
869 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
870 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
870 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
871
871
872 # special case we check history from commit that has this particular
872 # special case we check history from commit that has this particular
873 # file changed this means we check if it's included as well
873 # file changed this means we check if it's included as well
874 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
874 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
875 .get_node('setup.py')
875 .get_node('setup.py')
876 node_ids = [commit.raw_id for commit in node.history]
876 node_ids = [commit.raw_id for commit in node.history]
877 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
877 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
878 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
878 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
879
879
880 def test_file_history(self):
880 def test_file_history(self):
881 # we can only check if those commits are present in the history
881 # we can only check if those commits are present in the history
882 # as we cannot update this test every time file is changed
882 # as we cannot update this test every time file is changed
883 files = {
883 files = {
884 'setup.py': [7, 18, 45, 46, 47, 69, 77],
884 'setup.py': [7, 18, 45, 46, 47, 69, 77],
885 'vcs/nodes.py': [
885 'vcs/nodes.py': [
886 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
886 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
887 'vcs/backends/hg.py': [
887 'vcs/backends/hg.py': [
888 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
888 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
889 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
889 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
890 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
890 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
891 }
891 }
892 for path, indexes in files.items():
892 for path, indexes in files.items():
893 tip = self.repo.get_commit(commit_idx=indexes[-1])
893 tip = self.repo.get_commit(commit_idx=indexes[-1])
894 node = tip.get_node(path)
894 node = tip.get_node(path)
895 node_indexes = [commit.idx for commit in node.history]
895 node_indexes = [commit.idx for commit in node.history]
896 assert set(indexes).issubset(set(node_indexes)), (
896 assert set(indexes).issubset(set(node_indexes)), (
897 "We assumed that %s is subset of commits for which file %s "
897 "We assumed that %s is subset of commits for which file %s "
898 "has been changed, and history of that node returned: %s"
898 "has been changed, and history of that node returned: %s"
899 % (indexes, path, node_indexes))
899 % (indexes, path, node_indexes))
900
900
901 def test_file_annotate(self):
901 def test_file_annotate(self):
902 files = {
902 files = {
903 'vcs/backends/__init__.py': {
903 'vcs/backends/__init__.py': {
904 89: {
904 89: {
905 'lines_no': 31,
905 'lines_no': 31,
906 'commits': [
906 'commits': [
907 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
907 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
908 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
908 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
909 32, 32, 32, 32, 37, 32, 37, 37, 32,
909 32, 32, 32, 32, 37, 32, 37, 37, 32,
910 32, 32
910 32, 32
911 ]
911 ]
912 },
912 },
913 20: {
913 20: {
914 'lines_no': 1,
914 'lines_no': 1,
915 'commits': [4]
915 'commits': [4]
916 },
916 },
917 55: {
917 55: {
918 'lines_no': 31,
918 'lines_no': 31,
919 'commits': [
919 'commits': [
920 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
920 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
921 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
921 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
922 32, 32, 32, 32, 37, 32, 37, 37, 32,
922 32, 32, 32, 32, 37, 32, 37, 37, 32,
923 32, 32
923 32, 32
924 ]
924 ]
925 }
925 }
926 },
926 },
927 'vcs/exceptions.py': {
927 'vcs/exceptions.py': {
928 89: {
928 89: {
929 'lines_no': 18,
929 'lines_no': 18,
930 'commits': [
930 'commits': [
931 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
931 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
932 16, 16, 17, 16, 16, 18, 18, 18
932 16, 16, 17, 16, 16, 18, 18, 18
933 ]
933 ]
934 },
934 },
935 20: {
935 20: {
936 'lines_no': 18,
936 'lines_no': 18,
937 'commits': [
937 'commits': [
938 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
938 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
939 16, 16, 17, 16, 16, 18, 18, 18
939 16, 16, 17, 16, 16, 18, 18, 18
940 ]
940 ]
941 },
941 },
942 55: {
942 55: {
943 'lines_no': 18,
943 'lines_no': 18,
944 'commits': [
944 'commits': [
945 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
945 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
946 17, 16, 16, 18, 18, 18
946 17, 16, 16, 18, 18, 18
947 ]
947 ]
948 }
948 }
949 },
949 },
950 'MANIFEST.in': {
950 'MANIFEST.in': {
951 89: {
951 89: {
952 'lines_no': 5,
952 'lines_no': 5,
953 'commits': [7, 7, 7, 71, 71]
953 'commits': [7, 7, 7, 71, 71]
954 },
954 },
955 20: {
955 20: {
956 'lines_no': 3,
956 'lines_no': 3,
957 'commits': [7, 7, 7]
957 'commits': [7, 7, 7]
958 },
958 },
959 55: {
959 55: {
960 'lines_no': 3,
960 'lines_no': 3,
961 'commits': [7, 7, 7]
961 'commits': [7, 7, 7]
962 }
962 }
963 }
963 }
964 }
964 }
965
965
966 for fname, commit_dict in files.items():
966 for fname, commit_dict in files.items():
967 for idx, __ in commit_dict.items():
967 for idx, __ in commit_dict.items():
968 commit = self.repo.get_commit(commit_idx=idx)
968 commit = self.repo.get_commit(commit_idx=idx)
969 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
969 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
970 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
970 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
971 assert l1_1 == l1_2
971 assert l1_1 == l1_2
972 l1 = l1_2 = [
972 l1 = l1_2 = [
973 x[2]().idx for x in commit.get_file_annotate(fname)]
973 x[2]().idx for x in commit.get_file_annotate(fname)]
974 l2 = files[fname][idx]['commits']
974 l2 = files[fname][idx]['commits']
975 assert l1 == l2, (
975 assert l1 == l2, (
976 "The lists of commit for %s@commit_id%s"
976 "The lists of commit for %s@commit_id%s"
977 "from annotation list should match each other,"
977 "from annotation list should match each other,"
978 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
978 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
979
979
980 def test_commit_state(self):
980 def test_commit_state(self):
981 """
981 """
982 Tests which files have been added/changed/removed at particular commit
982 Tests which files have been added/changed/removed at particular commit
983 """
983 """
984
984
985 # commit_id 46ad32a4f974:
985 # commit_id 46ad32a4f974:
986 # hg st --rev 46ad32a4f974
986 # hg st --rev 46ad32a4f974
987 # changed: 13
987 # changed: 13
988 # added: 20
988 # added: 20
989 # removed: 1
989 # removed: 1
990 changed = set([
990 changed = set([
991 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
991 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
992 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
992 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
993 'vcs/__init__.py', 'vcs/backends/__init__.py',
993 'vcs/__init__.py', 'vcs/backends/__init__.py',
994 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
994 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
995 'vcs/utils/__init__.py'])
995 'vcs/utils/__init__.py'])
996
996
997 added = set([
997 added = set([
998 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
998 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
999 'docs/api/index.rst', 'docs/api/nodes.rst',
999 'docs/api/index.rst', 'docs/api/nodes.rst',
1000 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1000 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1001 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1001 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1002 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1002 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1003 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1003 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1004 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1004 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1005 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1005 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1006 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1006 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1007 'vcs/web/simplevcs/views.py'])
1007 'vcs/web/simplevcs/views.py'])
1008
1008
1009 removed = set(['docs/api.rst'])
1009 removed = set(['docs/api.rst'])
1010
1010
1011 commit64 = self.repo.get_commit('46ad32a4f974')
1011 commit64 = self.repo.get_commit('46ad32a4f974')
1012 assert set((node.path for node in commit64.added)) == added
1012 assert set((node.path for node in commit64.added)) == added
1013 assert set((node.path for node in commit64.changed)) == changed
1013 assert set((node.path for node in commit64.changed)) == changed
1014 assert set((node.path for node in commit64.removed)) == removed
1014 assert set((node.path for node in commit64.removed)) == removed
1015
1015
1016 # commit_id b090f22d27d6:
1016 # commit_id b090f22d27d6:
1017 # hg st --rev b090f22d27d6
1017 # hg st --rev b090f22d27d6
1018 # changed: 13
1018 # changed: 13
1019 # added: 20
1019 # added: 20
1020 # removed: 1
1020 # removed: 1
1021 commit88 = self.repo.get_commit('b090f22d27d6')
1021 commit88 = self.repo.get_commit('b090f22d27d6')
1022 assert set((node.path for node in commit88.added)) == set()
1022 assert set((node.path for node in commit88.added)) == set()
1023 assert set((node.path for node in commit88.changed)) == \
1023 assert set((node.path for node in commit88.changed)) == \
1024 set(['.hgignore'])
1024 set(['.hgignore'])
1025 assert set((node.path for node in commit88.removed)) == set()
1025 assert set((node.path for node in commit88.removed)) == set()
1026
1026
1027 #
1027 #
1028 # 85:
1028 # 85:
1029 # added: 2 [
1029 # added: 2 [
1030 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1030 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1031 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1031 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1032 # removed: 1 ['vcs/utils/web.py']
1032 # removed: 1 ['vcs/utils/web.py']
1033 commit85 = self.repo.get_commit(commit_idx=85)
1033 commit85 = self.repo.get_commit(commit_idx=85)
1034 assert set((node.path for node in commit85.added)) == set([
1034 assert set((node.path for node in commit85.added)) == set([
1035 'vcs/utils/diffs.py',
1035 'vcs/utils/diffs.py',
1036 'vcs/web/simplevcs/views/diffs.py'])
1036 'vcs/web/simplevcs/views/diffs.py'])
1037 assert set((node.path for node in commit85.changed)) == set([
1037 assert set((node.path for node in commit85.changed)) == set([
1038 'vcs/web/simplevcs/models.py',
1038 'vcs/web/simplevcs/models.py',
1039 'vcs/web/simplevcs/utils.py',
1039 'vcs/web/simplevcs/utils.py',
1040 'vcs/web/simplevcs/views/__init__.py',
1040 'vcs/web/simplevcs/views/__init__.py',
1041 'vcs/web/simplevcs/views/repository.py',
1041 'vcs/web/simplevcs/views/repository.py',
1042 ])
1042 ])
1043 assert set((node.path for node in commit85.removed)) == \
1043 assert set((node.path for node in commit85.removed)) == \
1044 set(['vcs/utils/web.py'])
1044 set(['vcs/utils/web.py'])
1045
1045
1046 def test_files_state(self):
1046 def test_files_state(self):
1047 """
1047 """
1048 Tests state of FileNodes.
1048 Tests state of FileNodes.
1049 """
1049 """
1050 commit = self.repo.get_commit(commit_idx=85)
1050 commit = self.repo.get_commit(commit_idx=85)
1051 node = commit.get_node('vcs/utils/diffs.py')
1051 node = commit.get_node('vcs/utils/diffs.py')
1052 assert node.state, NodeState.ADDED
1052 assert node.state, NodeState.ADDED
1053 assert node.added
1053 assert node.added
1054 assert not node.changed
1054 assert not node.changed
1055 assert not node.not_changed
1055 assert not node.not_changed
1056 assert not node.removed
1056 assert not node.removed
1057
1057
1058 commit = self.repo.get_commit(commit_idx=88)
1058 commit = self.repo.get_commit(commit_idx=88)
1059 node = commit.get_node('.hgignore')
1059 node = commit.get_node('.hgignore')
1060 assert node.state, NodeState.CHANGED
1060 assert node.state, NodeState.CHANGED
1061 assert not node.added
1061 assert not node.added
1062 assert node.changed
1062 assert node.changed
1063 assert not node.not_changed
1063 assert not node.not_changed
1064 assert not node.removed
1064 assert not node.removed
1065
1065
1066 commit = self.repo.get_commit(commit_idx=85)
1066 commit = self.repo.get_commit(commit_idx=85)
1067 node = commit.get_node('setup.py')
1067 node = commit.get_node('setup.py')
1068 assert node.state, NodeState.NOT_CHANGED
1068 assert node.state, NodeState.NOT_CHANGED
1069 assert not node.added
1069 assert not node.added
1070 assert not node.changed
1070 assert not node.changed
1071 assert node.not_changed
1071 assert node.not_changed
1072 assert not node.removed
1072 assert not node.removed
1073
1073
1074 # If node has REMOVED state then trying to fetch it would raise
1074 # If node has REMOVED state then trying to fetch it would raise
1075 # CommitError exception
1075 # CommitError exception
1076 commit = self.repo.get_commit(commit_idx=2)
1076 commit = self.repo.get_commit(commit_idx=2)
1077 path = 'vcs/backends/BaseRepository.py'
1077 path = 'vcs/backends/BaseRepository.py'
1078 with pytest.raises(NodeDoesNotExistError):
1078 with pytest.raises(NodeDoesNotExistError):
1079 commit.get_node(path)
1079 commit.get_node(path)
1080 # but it would be one of ``removed`` (commit's attribute)
1080 # but it would be one of ``removed`` (commit's attribute)
1081 assert path in [rf.path for rf in commit.removed]
1081 assert path in [rf.path for rf in commit.removed]
1082
1082
1083 def test_commit_message_is_unicode(self):
1083 def test_commit_message_is_unicode(self):
1084 for cm in self.repo:
1084 for cm in self.repo:
1085 assert type(cm.message) == unicode
1085 assert type(cm.message) == unicode
1086
1086
1087 def test_commit_author_is_unicode(self):
1087 def test_commit_author_is_unicode(self):
1088 for cm in self.repo:
1088 for cm in self.repo:
1089 assert type(cm.author) == unicode
1089 assert type(cm.author) == unicode
1090
1090
1091 def test_repo_files_content_is_unicode(self):
1091 def test_repo_files_content_is_unicode(self):
1092 test_commit = self.repo.get_commit(commit_idx=100)
1092 test_commit = self.repo.get_commit(commit_idx=100)
1093 for node in test_commit.get_node('/'):
1093 for node in test_commit.get_node('/'):
1094 if node.is_file():
1094 if node.is_file():
1095 assert type(node.content) == unicode
1095 assert type(node.content) == unicode
1096
1096
1097 def test_wrong_path(self):
1097 def test_wrong_path(self):
1098 # There is 'setup.py' in the root dir but not there:
1098 # There is 'setup.py' in the root dir but not there:
1099 path = 'foo/bar/setup.py'
1099 path = 'foo/bar/setup.py'
1100 with pytest.raises(VCSError):
1100 with pytest.raises(VCSError):
1101 self.repo.get_commit().get_node(path)
1101 self.repo.get_commit().get_node(path)
1102
1102
1103 def test_author_email(self):
1103 def test_author_email(self):
1104 assert 'marcin@python-blog.com' == \
1104 assert 'marcin@python-blog.com' == \
1105 self.repo.get_commit('b986218ba1c9').author_email
1105 self.repo.get_commit('b986218ba1c9').author_email
1106 assert 'lukasz.balcerzak@python-center.pl' == \
1106 assert 'lukasz.balcerzak@python-center.pl' == \
1107 self.repo.get_commit('3803844fdbd3').author_email
1107 self.repo.get_commit('3803844fdbd3').author_email
1108 assert '' == self.repo.get_commit('84478366594b').author_email
1108 assert '' == self.repo.get_commit('84478366594b').author_email
1109
1109
1110 def test_author_username(self):
1110 def test_author_username(self):
1111 assert 'Marcin Kuzminski' == \
1111 assert 'Marcin Kuzminski' == \
1112 self.repo.get_commit('b986218ba1c9').author_name
1112 self.repo.get_commit('b986218ba1c9').author_name
1113 assert 'Lukasz Balcerzak' == \
1113 assert 'Lukasz Balcerzak' == \
1114 self.repo.get_commit('3803844fdbd3').author_name
1114 self.repo.get_commit('3803844fdbd3').author_name
1115 assert 'marcink' == \
1115 assert 'marcink' == \
1116 self.repo.get_commit('84478366594b').author_name
1116 self.repo.get_commit('84478366594b').author_name
1117
1117
1118
1118
class TestLargeFileRepo(object):
    """Checks resolution of Mercurial largefile pointer nodes."""

    def test_large_file(self, backend_hg):
        """A ``.hglf`` pointer node resolves to its real largefile node."""
        repo = backend_hg.create_test_repo('largefiles', make_db_config())

        tip_commit = repo.scm_instance().get_commit()
        pointer_node = tip_commit.get_node('.hglf/thisfileislarge')
        largefile_node = pointer_node.get_largefile_node()

        assert largefile_node.is_largefile() is True
        assert largefile_node.size == 1024000
        assert largefile_node.name == '.hglf/thisfileislarge'
1132
1132
1133
1133
1134 class TestGetBranchName(object):
1134 class TestGetBranchName(object):
1135 def test_returns_ref_name_when_type_is_branch(self):
1135 def test_returns_ref_name_when_type_is_branch(self):
1136 ref = self._create_ref('branch', 'fake-name')
1136 ref = self._create_ref('branch', 'fake-name')
1137 result = self.repo._get_branch_name(ref)
1137 result = self.repo._get_branch_name(ref)
1138 assert result == ref.name
1138 assert result == ref.name
1139
1139
1140 @pytest.mark.parametrize("type_", ("book", "tag"))
1140 @pytest.mark.parametrize("type_", ("book", "tag"))
1141 def test_queries_remote_when_type_is_not_branch(self, type_):
1141 def test_queries_remote_when_type_is_not_branch(self, type_):
1142 ref = self._create_ref(type_, 'wrong-fake-name')
1142 ref = self._create_ref(type_, 'wrong-fake-name')
1143 with mock.patch.object(self.repo, "_remote") as remote_mock:
1143 with mock.patch.object(self.repo, "_remote") as remote_mock:
1144 remote_mock.ctx_branch.return_value = "fake-name"
1144 remote_mock.ctx_branch.return_value = "fake-name"
1145 result = self.repo._get_branch_name(ref)
1145 result = self.repo._get_branch_name(ref)
1146 assert result == "fake-name"
1146 assert result == "fake-name"
1147 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1147 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1148
1148
1149 def _create_ref(self, type_, name):
1149 def _create_ref(self, type_, name):
1150 ref = mock.Mock()
1150 ref = mock.Mock()
1151 ref.type = type_
1151 ref.type = type_
1152 ref.name = 'wrong-fake-name'
1152 ref.name = 'wrong-fake-name'
1153 ref.commit_id = "deadbeef"
1153 ref.commit_id = "deadbeef"
1154 return ref
1154 return ref
1155
1155
1156
1156
class TestIsTheSameBranch(object):
    """Unit tests for ``_is_the_same_branch`` ref comparison."""

    def test_returns_true_when_branches_are_equal(self):
        # Both refs resolve to the same branch name -> True.
        source_ref = mock.Mock(name="source-ref")
        target_ref = mock.Mock(name="target-ref")
        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", return_value="default")
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is True

    def test_returns_false_when_branches_are_not_equal(self):
        # Refs resolving to different branch names -> False.
        source_ref = mock.Mock(name="source-ref")
        source_ref.name = "source-branch"
        target_ref = mock.Mock(name="target-ref")
        # BUG FIX: the original assigned ``source_ref.name`` a second time
        # ("target-branch") and never set ``target_ref.name``; the target
        # name stayed a child Mock (``Mock(name=...)`` does not set the
        # ``name`` attribute), so the test passed for the wrong reason.
        target_ref.name = "target-branch"

        def side_effect(ref):
            # Resolve each ref's branch name from its ``name`` attribute.
            return ref.name

        branch_name_patcher = mock.patch.object(
            self.repo, "_get_branch_name", side_effect=side_effect)
        with branch_name_patcher as branch_name_mock:
            result = self.repo._is_the_same_branch(source_ref, target_ref)

        expected_calls = [mock.call(source_ref), mock.call(target_ref)]
        assert branch_name_mock.call_args_list == expected_calls
        assert result is False
General Comments 0
You need to be logged in to leave comments. Login now