##// END OF EJS Templates
pull-requests: fixed a case for Git repositories where a merge check that failed due to merge conflicts caused the pull request to wrongly report missing commits.
marcink -
r4299:04e45b92 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 from sqlalchemy import *
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8 from sqlalchemy import BigInteger
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import init_model_encryption
12
13
14 log = logging.getLogger(__name__)
15
16
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 as db

    init_model_encryption(db)

    context = MigrationContext.configure(migrate_engine.connect())
    op = Operations(context)

    # Both tables gain the same JSON column; iterate instead of
    # duplicating the column definition for each table.
    tables = [
        db.PullRequest.__table__,
        db.PullRequestVersion.__table__,
    ]
    for table in tables:
        with op.batch_alter_table(table.name) as batch_op:
            new_column = Column(
                'last_merge_metadata',
                db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
            batch_op.add_column(new_column)
44
45
def downgrade(migrate_engine):
    """Downgrade is a no-op for this migration; the engine is only
    bound to satisfy the migration API."""
    metadata = MetaData()
    metadata.bind = migrate_engine
49
50
def fixups(models, _SESSION):
    """No data fixups are required for this migration."""
    return None
@@ -1,57 +1,57 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import sys
import platform

# The version tuple comes from the VERSION file shipped next to this module.
# Use a context manager so the file handle is closed deterministically
# (the original left the handle for the GC to reclaim).
with open(os.path.join(os.path.dirname(__file__), 'VERSION')) as _version_file:
    VERSION = tuple(_version_file.read().split('.'))

BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = ('.'.join((str(each) for each in VERSION[:3])))
__dbversion__ = 104  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
@@ -1,1215 +1,1217 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35
36 36
def route_path(name, params=None, **kwargs):
    """Resolve a named route into a URL, formatting ``kwargs`` into the
    pattern and appending urlencoded query ``params`` when given."""
    import urllib

    url_patterns = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
        'repo_commits': '/{repo_name}/commits',
        'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
        'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
        'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
        'pullrequest_new': '/{repo_name}/pull-request/new',
        'pullrequest_create': '/{repo_name}/pull-request/create',
        'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
        'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
        'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
        'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
        'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
    }
    resolved = url_patterns[name].format(**kwargs)

    if params:
        resolved = '{}?{}'.format(resolved, urllib.urlencode(params))
    return resolved
62 62
63 63
64 64 @pytest.mark.usefixtures('app', 'autologin_user')
65 65 @pytest.mark.backends("git", "hg")
66 66 class TestPullrequestsView(object):
67 67
68 68 def test_index(self, backend):
69 69 self.app.get(route_path(
70 70 'pullrequest_new',
71 71 repo_name=backend.repo_name))
72 72
73 73 def test_option_menu_create_pull_request_exists(self, backend):
74 74 repo_name = backend.repo_name
75 75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76 76
77 77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 78 'pullrequest_new', repo_name=repo_name)
79 79 response.mustcontain(create_pr_link)
80 80
81 81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 82 repo = backend.repo
83 83
84 84 self.app.get(
85 85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
    @pytest.mark.parametrize('pr_merge_enabled', [True, False])
    @pytest.mark.parametrize('range_diff', ["0", "1"])
    def test_show(self, pr_util, pr_merge_enabled, range_diff):
        # The PR show page lists every revision, the target ref, the merge
        # box, and the merge-status message matching the merge setting,
        # for both regular and range-diff modes.
        pull_request = pr_util.create_pull_request(
            mergeable=pr_merge_enabled, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id,
            params={'range-diff': range_diff}))

        # every revision of the PR must be rendered on the page
        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

        response.mustcontain(pull_request.target_ref_parts.type)
        response.mustcontain(pull_request.target_ref_parts.name)

        response.mustcontain('class="pull-request-merge"')

        if pr_merge_enabled:
            response.mustcontain('Pull request reviewer approval is pending')
        else:
            response.mustcontain('Server-side pull request merging is disabled.')

        if range_diff == "1":
            response.mustcontain('Turn off: Show the diff as commit range')
116 116
    def test_close_status_visibility(self, pr_util, user_util, csrf_token):
        # The close-PR action is hidden from users without merge permission
        # and shown once 'repository.write' is granted.
        # Logout
        response = self.app.post(
            h.route_path('logout'),
            params={'csrf_token': csrf_token})
        # Login as regular user
        response = self.app.post(h.route_path('login'),
                                 {'username': TEST_USER_REGULAR_LOGIN,
                                  'password': 'test12'})

        pull_request = pr_util.create_pull_request(
            author=TEST_USER_REGULAR_LOGIN)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # for regular user without a merge permissions, we don't see it
        assert_response.no_element_exists('#close-pull-request-action')

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo,
            UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
            'repository.write')
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # now regular user has a merge permissions, we have CLOSE button
        assert_response.one_element_exists('#close-pull-request-action')
155 155
156 156 def test_show_invalid_commit_id(self, pr_util):
157 157 # Simulating invalid revisions which will cause a lookup error
158 158 pull_request = pr_util.create_pull_request()
159 159 pull_request.revisions = ['invalid']
160 160 Session().add(pull_request)
161 161 Session().commit()
162 162
163 163 response = self.app.get(route_path(
164 164 'pullrequest_show',
165 165 repo_name=pull_request.target_repo.scm_instance().name,
166 166 pull_request_id=pull_request.pull_request_id))
167 167
168 168 for commit_id in pull_request.revisions:
169 169 response.mustcontain(commit_id)
170 170
171 171 def test_show_invalid_source_reference(self, pr_util):
172 172 pull_request = pr_util.create_pull_request()
173 173 pull_request.source_ref = 'branch:b:invalid'
174 174 Session().add(pull_request)
175 175 Session().commit()
176 176
177 177 self.app.get(route_path(
178 178 'pullrequest_show',
179 179 repo_name=pull_request.target_repo.scm_instance().name,
180 180 pull_request_id=pull_request.pull_request_id))
181 181
182 182 def test_edit_title_description(self, pr_util, csrf_token):
183 183 pull_request = pr_util.create_pull_request()
184 184 pull_request_id = pull_request.pull_request_id
185 185
186 186 response = self.app.post(
187 187 route_path('pullrequest_update',
188 188 repo_name=pull_request.target_repo.repo_name,
189 189 pull_request_id=pull_request_id),
190 190 params={
191 191 'edit_pull_request': 'true',
192 192 'title': 'New title',
193 193 'description': 'New description',
194 194 'csrf_token': csrf_token})
195 195
196 196 assert_session_flash(
197 197 response, u'Pull request title & description updated.',
198 198 category='success')
199 199
200 200 pull_request = PullRequest.get(pull_request_id)
201 201 assert pull_request.title == 'New title'
202 202 assert pull_request.description == 'New description'
203 203
204 204 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 205 pull_request = pr_util.create_pull_request()
206 206 pull_request_id = pull_request.pull_request_id
207 207 repo_name = pull_request.target_repo.repo_name
208 208 pr_util.close()
209 209
210 210 response = self.app.post(
211 211 route_path('pullrequest_update',
212 212 repo_name=repo_name, pull_request_id=pull_request_id),
213 213 params={
214 214 'edit_pull_request': 'true',
215 215 'title': 'New title',
216 216 'description': 'New description',
217 217 'csrf_token': csrf_token}, status=200)
218 218 assert_session_flash(
219 219 response, u'Cannot update closed pull requests.',
220 220 category='error')
221 221
222 222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224 224
225 225 pull_request = pr_util.create_pull_request()
226 226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 227 Session().add(pull_request)
228 228 Session().commit()
229 229
230 230 pull_request_id = pull_request.pull_request_id
231 231
232 232 response = self.app.post(
233 233 route_path('pullrequest_update',
234 234 repo_name=pull_request.target_repo.repo_name,
235 235 pull_request_id=pull_request_id),
236 236 params={'update_commits': 'true', 'csrf_token': csrf_token})
237 237
238 238 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 239 UpdateFailureReason.MISSING_SOURCE_REF])
240 240 assert_session_flash(response, expected_msg, category='error')
241 241
    def test_missing_target_reference(self, pr_util, csrf_token):
        # The merge-status box must report the missing target ref (instead
        # of e.g. missing commits) when the target ref no longer exists.
        from rhodecode.lib.vcs.backends.base import MergeFailureReason
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        unicode_reference = u'branch:invalid-branch:invalid-commit-id'
        pull_request.target_ref = unicode_reference
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id
        pull_request_url = route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.repo_name,
            pull_request_id=pull_request_id)

        response = self.app.get(pull_request_url)
        target_ref_id = 'invalid-branch'  # NOTE(review): unused local
        # build the expected MISSING_TARGET_REF response to compare its
        # rendered status message against the page
        merge_resp = MergeResponse(
            True, True, '', MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
        response.assert_response().element_contains(
            'div[data-role="merge-message"]', merge_resp.merge_status_message)
264 264
    def test_comment_and_close_pull_request_custom_message_approved(
            self, pr_util, csrf_token, xhr_header):
        # Closing an approved PR with a custom comment records the close
        # action in the journal, keeps the APPROVED status, and stores the
        # comment text.
        pull_request = pr_util.create_pull_request(approved=True)
        pull_request_id = pull_request.pull_request_id
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'text': 'Closing a PR',
                'csrf_token': csrf_token},
            extra_environ=xhr_header,)

        # newest journal entry must be the close action
        journal = UserLog.query()\
            .filter(UserLog.user_id == author)\
            .filter(UserLog.repository_id == repo) \
            .order_by(UserLog.user_log_id.asc()) \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # approval status must survive the close
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_APPROVED
        comments = ChangesetComment().query() \
            .filter(ChangesetComment.pull_request == pull_request) \
            .order_by(ChangesetComment.comment_id.asc())\
            .all()
        assert comments[-1].text == 'Closing a PR'
301 301
    def test_comment_force_close_pull_request_rejected(
            self, pr_util, csrf_token, xhr_header):
        # Force-closing an unapproved PR (reviewers pending) journals the
        # close and leaves the PR in REJECTED status.
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        # two pending reviewers so the PR is not approved
        PullRequestModel().update_reviewers(
            pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
            pull_request.author)
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        pull_request = PullRequest.get(pull_request_id)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
            .order_by(UserLog.user_log_id.asc()) \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED
333 333
334 334 def test_comment_and_close_pull_request(
335 335 self, pr_util, csrf_token, xhr_header):
336 336 pull_request = pr_util.create_pull_request()
337 337 pull_request_id = pull_request.pull_request_id
338 338
339 339 response = self.app.post(
340 340 route_path('pullrequest_comment_create',
341 341 repo_name=pull_request.target_repo.scm_instance().name,
342 342 pull_request_id=pull_request.pull_request_id),
343 343 params={
344 344 'close_pull_request': 'true',
345 345 'csrf_token': csrf_token},
346 346 extra_environ=xhr_header)
347 347
348 348 assert response.json
349 349
350 350 pull_request = PullRequest.get(pull_request_id)
351 351 assert pull_request.is_closed()
352 352
353 353 # check only the latest status, not the review status
354 354 status = ChangesetStatusModel().get_status(
355 355 pull_request.source_repo, pull_request=pull_request)
356 356 assert status == ChangesetStatus.STATUS_REJECTED
357 357
    def test_create_pull_request(self, backend, csrf_token):
        # Creating a PR through the form stores both revisions and the
        # expected source/target refs.
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        # NOTE: the tuple list mirrors deform's sequence serialization;
        # the __start__/__end__ markers and their order are significant.
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        # the redirect target encodes the new PR id
        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we have now both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref
409 409
    def test_reviewer_notifications(self, backend, csrf_token):
        # We have to use the app.post for this test so it will create the
        # notifications properly with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # deform sequence payload; order of the markers is significant
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "requested a pull request review. !%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [(1, [], False, [])],
            pull_request.author)
        assert len(notifications.all()) == 2
477 477
    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        # target_ref must end up pointing at the common ancestor's commit
        # id so the diff against the target is correct.
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                 FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        # deform sequence payload; order of the markers is significant
        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        response.mustcontain(no=['content_of_ancestor'])
        response.mustcontain(no=['content_of_ancestor-child'])
        response.mustcontain('content_of_change')
541 541
542 542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 543 # Clear any previous calls to rcextensions
544 544 rhodecode.EXTENSIONS.calls.clear()
545 545
546 546 pull_request = pr_util.create_pull_request(
547 547 approved=True, mergeable=True)
548 548 pull_request_id = pull_request.pull_request_id
549 549 repo_name = pull_request.target_repo.scm_instance().name,
550 550
551 551 url = route_path('pullrequest_merge',
552 552 repo_name=str(repo_name[0]),
553 553 pull_request_id=pull_request_id)
554 554 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
555 555
556 556 pull_request = PullRequest.get(pull_request_id)
557 557
558 558 assert response.status_int == 200
559 559 assert pull_request.is_closed()
560 560 assert_pull_request_status(
561 561 pull_request, ChangesetStatus.STATUS_APPROVED)
562 562
563 563 # Check the relevant log entries were added
564 564 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
565 565 actions = [log.action for log in user_logs]
566 566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 567 expected_actions = [
568 568 u'repo.pull_request.close',
569 569 u'repo.pull_request.merge',
570 570 u'repo.pull_request.comment.create'
571 571 ]
572 572 assert actions == expected_actions
573 573
574 574 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
575 575 actions = [log for log in user_logs]
576 576 assert actions[-1].action == 'user.push'
577 577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578 578
579 579 # Check post_push rcextension was really executed
580 580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
581 581 assert len(push_calls) == 1
582 582 unused_last_call_args, last_call_kwargs = push_calls[0]
583 583 assert last_call_kwargs['action'] == 'push'
584 584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
585 585
586 586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 587 pull_request = pr_util.create_pull_request(mergeable=False)
588 588 pull_request_id = pull_request.pull_request_id
589 589 pull_request = PullRequest.get(pull_request_id)
590 590
591 591 response = self.app.post(
592 592 route_path('pullrequest_merge',
593 593 repo_name=pull_request.target_repo.scm_instance().name,
594 594 pull_request_id=pull_request.pull_request_id),
595 595 params={'csrf_token': csrf_token}).follow()
596 596
597 597 assert response.status_int == 200
598 598 response.mustcontain(
599 599 'Merge is not currently possible because of below failed checks.')
600 600 response.mustcontain('Server-side pull request merging is disabled.')
601 601
602 602 @pytest.mark.skip_backends('svn')
603 603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 604 pull_request = pr_util.create_pull_request(mergeable=True)
605 605 pull_request_id = pull_request.pull_request_id
606 606 repo_name = pull_request.target_repo.scm_instance().name
607 607
608 608 response = self.app.post(
609 609 route_path('pullrequest_merge',
610 610 repo_name=repo_name, pull_request_id=pull_request_id),
611 611 params={'csrf_token': csrf_token}).follow()
612 612
613 613 assert response.status_int == 200
614 614
615 615 response.mustcontain(
616 616 'Merge is not currently possible because of below failed checks.')
617 617 response.mustcontain('Pull request reviewer approval is pending.')
618 618
    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        # When the actual merge op fails (PUSH_FAILED), the redirect must
        # flash the failure message built from the merge response metadata.
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
                                   MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=merge_resp),
            # merge_status returns (ancestor, possible, message)
            merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

        merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        assert_session_flash(response, merge_resp.merge_status_message)
645 645
    def test_update_source_revision(self, backend, csrf_token):
        # After the source repo gains a commit, an update must pull the new
        # revision into the PR and report "1 added, 0 removed".
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        response.mustcontain('Pull request updated to')
        response.mustcontain('with 1 added, 0 removed commits.')

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
697 697
698 698 def test_update_target_revision(self, backend, csrf_token):
699 699 commits = [
700 700 {'message': 'ancestor'},
701 701 {'message': 'change'},
702 702 {'message': 'ancestor-new', 'parents': ['ancestor']},
703 703 {'message': 'change-rebased'},
704 704 ]
705 705 commit_ids = backend.create_master_repo(commits)
706 706 target = backend.create_repo(heads=['ancestor'])
707 707 source = backend.create_repo(heads=['change'])
708 708
709 709 # create pr from a in source to A in target
710 710 pull_request = PullRequest()
711 711
712 712 pull_request.source_repo = source
713 713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
714 714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
715 715
716 716 pull_request.target_repo = target
717 717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
719 719
720 720 pull_request.revisions = [commit_ids['change']]
721 721 pull_request.title = u"Test"
722 722 pull_request.description = u"Description"
723 723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
724 724 pull_request.pull_request_state = PullRequest.STATE_CREATED
725 725
726 726 Session().add(pull_request)
727 727 Session().commit()
728 728 pull_request_id = pull_request.pull_request_id
729 729
730 730 # target has ancestor - ancestor-new
731 731 # source has ancestor - ancestor-new - change-rebased
732 732 backend.pull_heads(target, heads=['ancestor-new'])
733 733 backend.pull_heads(source, heads=['change-rebased'])
734 734
735 735 # update PR
736 736 url = route_path('pullrequest_update',
737 737 repo_name=target.repo_name,
738 738 pull_request_id=pull_request_id)
739 739 self.app.post(url,
740 740 params={'update_commits': 'true', 'csrf_token': csrf_token},
741 741 status=200)
742 742
743 743 # check that we have now both revisions
744 744 pull_request = PullRequest.get(pull_request_id)
745 745 assert pull_request.revisions == [commit_ids['change-rebased']]
746 746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
748 748
749 749 response = self.app.get(
750 750 route_path('pullrequest_show',
751 751 repo_name=target.repo_name,
752 752 pull_request_id=pull_request.pull_request_id))
753 753 assert response.status_int == 200
754 754 response.mustcontain('Pull request updated to')
755 755 response.mustcontain('with 1 added, 1 removed commits.')
756 756
757 757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 758 backend = backend_git
759 759 commits = [
760 760 {'message': 'master-commit-1'},
761 761 {'message': 'master-commit-2-change-1'},
762 762 {'message': 'master-commit-3-change-2'},
763 763
764 764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 765 {'message': 'feat-commit-2'},
766 766 ]
767 767 commit_ids = backend.create_master_repo(commits)
768 768 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 769 source = backend.create_repo(heads=['feat-commit-2'])
770 770
771 771 # create pr from a in source to A in target
772 772 pull_request = PullRequest()
773 773 pull_request.source_repo = source
774 774
775 775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 776 branch=backend.default_branch_name,
777 777 commit_id=commit_ids['master-commit-3-change-2'])
778 778
779 779 pull_request.target_repo = target
780 780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
781 781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
782 782
783 783 pull_request.revisions = [
784 784 commit_ids['feat-commit-1'],
785 785 commit_ids['feat-commit-2']
786 786 ]
787 787 pull_request.title = u"Test"
788 788 pull_request.description = u"Description"
789 789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
790 790 pull_request.pull_request_state = PullRequest.STATE_CREATED
791 791 Session().add(pull_request)
792 792 Session().commit()
793 793 pull_request_id = pull_request.pull_request_id
794 794
795 795 # PR is created, now we simulate a force-push into target,
796 796 # that drops a 2 last commits
797 797 vcsrepo = target.scm_instance()
798 798 vcsrepo.config.clear_section('hooks')
799 799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
800 800
801 801 # update PR
802 802 url = route_path('pullrequest_update',
803 803 repo_name=target.repo_name,
804 804 pull_request_id=pull_request_id)
805 805 self.app.post(url,
806 806 params={'update_commits': 'true', 'csrf_token': csrf_token},
807 807 status=200)
808 808
809 809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
810 810 assert response.status_int == 200
811 811 response.mustcontain('Pull request updated to')
812 812 response.mustcontain('with 0 added, 0 removed commits.')
813 813
814 814 def test_update_of_ancestor_reference(self, backend, csrf_token):
815 815 commits = [
816 816 {'message': 'ancestor'},
817 817 {'message': 'change'},
818 818 {'message': 'change-2'},
819 819 {'message': 'ancestor-new', 'parents': ['ancestor']},
820 820 {'message': 'change-rebased'},
821 821 ]
822 822 commit_ids = backend.create_master_repo(commits)
823 823 target = backend.create_repo(heads=['ancestor'])
824 824 source = backend.create_repo(heads=['change'])
825 825
826 826 # create pr from a in source to A in target
827 827 pull_request = PullRequest()
828 828 pull_request.source_repo = source
829 829
830 830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
831 831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
832 832 pull_request.target_repo = target
833 833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
834 834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
835 835 pull_request.revisions = [commit_ids['change']]
836 836 pull_request.title = u"Test"
837 837 pull_request.description = u"Description"
838 838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
839 839 pull_request.pull_request_state = PullRequest.STATE_CREATED
840 840 Session().add(pull_request)
841 841 Session().commit()
842 842 pull_request_id = pull_request.pull_request_id
843 843
844 844 # target has ancestor - ancestor-new
845 845 # source has ancestor - ancestor-new - change-rebased
846 846 backend.pull_heads(target, heads=['ancestor-new'])
847 847 backend.pull_heads(source, heads=['change-rebased'])
848 848
849 849 # update PR
850 850 self.app.post(
851 851 route_path('pullrequest_update',
852 852 repo_name=target.repo_name, pull_request_id=pull_request_id),
853 853 params={'update_commits': 'true', 'csrf_token': csrf_token},
854 854 status=200)
855 855
856 856 # Expect the target reference to be updated correctly
857 857 pull_request = PullRequest.get(pull_request_id)
858 858 assert pull_request.revisions == [commit_ids['change-rebased']]
859 859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
860 860 branch=backend.default_branch_name,
861 861 commit_id=commit_ids['ancestor-new'])
862 862 assert pull_request.target_ref == expected_target_ref
863 863
864 864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
865 865 branch_name = 'development'
866 866 commits = [
867 867 {'message': 'initial-commit'},
868 868 {'message': 'old-feature'},
869 869 {'message': 'new-feature', 'branch': branch_name},
870 870 ]
871 871 repo = backend_git.create_repo(commits)
872 872 repo_name = repo.repo_name
873 873 commit_ids = backend_git.commit_ids
874 874
875 875 pull_request = PullRequest()
876 876 pull_request.source_repo = repo
877 877 pull_request.target_repo = repo
878 878 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
879 879 branch=branch_name, commit_id=commit_ids['new-feature'])
880 880 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
881 881 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
882 882 pull_request.revisions = [commit_ids['new-feature']]
883 883 pull_request.title = u"Test"
884 884 pull_request.description = u"Description"
885 885 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
886 886 pull_request.pull_request_state = PullRequest.STATE_CREATED
887 887 Session().add(pull_request)
888 888 Session().commit()
889 889
890 890 pull_request_id = pull_request.pull_request_id
891 891
892 892 vcs = repo.scm_instance()
893 893 vcs.remove_ref('refs/heads/{}'.format(branch_name))
894 # NOTE(marcink): run GC to ensure the commits are gone
895 vcs.run_gc()
894 896
895 897 response = self.app.get(route_path(
896 898 'pullrequest_show',
897 899 repo_name=repo_name,
898 900 pull_request_id=pull_request_id))
899 901
900 902 assert response.status_int == 200
901 903
902 904 response.assert_response().element_contains(
903 905 '#changeset_compare_view_content .alert strong',
904 906 'Missing commits')
905 907 response.assert_response().element_contains(
906 908 '#changeset_compare_view_content .alert',
907 909 'This pull request cannot be displayed, because one or more'
908 910 ' commits no longer exist in the source repository.')
909 911
910 912 def test_strip_commits_from_pull_request(
911 913 self, backend, pr_util, csrf_token):
912 914 commits = [
913 915 {'message': 'initial-commit'},
914 916 {'message': 'old-feature'},
915 917 {'message': 'new-feature', 'parents': ['initial-commit']},
916 918 ]
917 919 pull_request = pr_util.create_pull_request(
918 920 commits, target_head='initial-commit', source_head='new-feature',
919 921 revisions=['new-feature'])
920 922
921 923 vcs = pr_util.source_repository.scm_instance()
922 924 if backend.alias == 'git':
923 925 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
924 926 else:
925 927 vcs.strip(pr_util.commit_ids['new-feature'])
926 928
927 929 response = self.app.get(route_path(
928 930 'pullrequest_show',
929 931 repo_name=pr_util.target_repository.repo_name,
930 932 pull_request_id=pull_request.pull_request_id))
931 933
932 934 assert response.status_int == 200
933 935
934 936 response.assert_response().element_contains(
935 937 '#changeset_compare_view_content .alert strong',
936 938 'Missing commits')
937 939 response.assert_response().element_contains(
938 940 '#changeset_compare_view_content .alert',
939 941 'This pull request cannot be displayed, because one or more'
940 942 ' commits no longer exist in the source repository.')
941 943 response.assert_response().element_contains(
942 944 '#update_commits',
943 945 'Update commits')
944 946
945 947 def test_strip_commits_and_update(
946 948 self, backend, pr_util, csrf_token):
947 949 commits = [
948 950 {'message': 'initial-commit'},
949 951 {'message': 'old-feature'},
950 952 {'message': 'new-feature', 'parents': ['old-feature']},
951 953 ]
952 954 pull_request = pr_util.create_pull_request(
953 955 commits, target_head='old-feature', source_head='new-feature',
954 956 revisions=['new-feature'], mergeable=True)
955 957
956 958 vcs = pr_util.source_repository.scm_instance()
957 959 if backend.alias == 'git':
958 960 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
959 961 else:
960 962 vcs.strip(pr_util.commit_ids['new-feature'])
961 963
962 964 url = route_path('pullrequest_update',
963 965 repo_name=pull_request.target_repo.repo_name,
964 966 pull_request_id=pull_request.pull_request_id)
965 967 response = self.app.post(url,
966 968 params={'update_commits': 'true',
967 969 'csrf_token': csrf_token})
968 970
969 971 assert response.status_int == 200
970 972 assert response.body == '{"response": true, "redirect_url": null}'
971 973
972 974 # Make sure that after update, it won't raise 500 errors
973 975 response = self.app.get(route_path(
974 976 'pullrequest_show',
975 977 repo_name=pr_util.target_repository.repo_name,
976 978 pull_request_id=pull_request.pull_request_id))
977 979
978 980 assert response.status_int == 200
979 981 response.assert_response().element_contains(
980 982 '#changeset_compare_view_content .alert strong',
981 983 'Missing commits')
982 984
983 985 def test_branch_is_a_link(self, pr_util):
984 986 pull_request = pr_util.create_pull_request()
985 987 pull_request.source_ref = 'branch:origin:1234567890abcdef'
986 988 pull_request.target_ref = 'branch:target:abcdef1234567890'
987 989 Session().add(pull_request)
988 990 Session().commit()
989 991
990 992 response = self.app.get(route_path(
991 993 'pullrequest_show',
992 994 repo_name=pull_request.target_repo.scm_instance().name,
993 995 pull_request_id=pull_request.pull_request_id))
994 996 assert response.status_int == 200
995 997
996 998 source = response.assert_response().get_element('.pr-source-info')
997 999 source_parent = source.getparent()
998 1000 assert len(source_parent) == 1
999 1001
1000 1002 target = response.assert_response().get_element('.pr-target-info')
1001 1003 target_parent = target.getparent()
1002 1004 assert len(target_parent) == 1
1003 1005
1004 1006 expected_origin_link = route_path(
1005 1007 'repo_commits',
1006 1008 repo_name=pull_request.source_repo.scm_instance().name,
1007 1009 params=dict(branch='origin'))
1008 1010 expected_target_link = route_path(
1009 1011 'repo_commits',
1010 1012 repo_name=pull_request.target_repo.scm_instance().name,
1011 1013 params=dict(branch='target'))
1012 1014 assert source_parent.attrib['href'] == expected_origin_link
1013 1015 assert target_parent.attrib['href'] == expected_target_link
1014 1016
1015 1017 def test_bookmark_is_not_a_link(self, pr_util):
1016 1018 pull_request = pr_util.create_pull_request()
1017 1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 1021 Session().add(pull_request)
1020 1022 Session().commit()
1021 1023
1022 1024 response = self.app.get(route_path(
1023 1025 'pullrequest_show',
1024 1026 repo_name=pull_request.target_repo.scm_instance().name,
1025 1027 pull_request_id=pull_request.pull_request_id))
1026 1028 assert response.status_int == 200
1027 1029
1028 1030 source = response.assert_response().get_element('.pr-source-info')
1029 1031 assert source.text.strip() == 'bookmark:origin'
1030 1032 assert source.getparent().attrib.get('href') is None
1031 1033
1032 1034 target = response.assert_response().get_element('.pr-target-info')
1033 1035 assert target.text.strip() == 'bookmark:target'
1034 1036 assert target.getparent().attrib.get('href') is None
1035 1037
1036 1038 def test_tag_is_not_a_link(self, pr_util):
1037 1039 pull_request = pr_util.create_pull_request()
1038 1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 1042 Session().add(pull_request)
1041 1043 Session().commit()
1042 1044
1043 1045 response = self.app.get(route_path(
1044 1046 'pullrequest_show',
1045 1047 repo_name=pull_request.target_repo.scm_instance().name,
1046 1048 pull_request_id=pull_request.pull_request_id))
1047 1049 assert response.status_int == 200
1048 1050
1049 1051 source = response.assert_response().get_element('.pr-source-info')
1050 1052 assert source.text.strip() == 'tag:origin'
1051 1053 assert source.getparent().attrib.get('href') is None
1052 1054
1053 1055 target = response.assert_response().get_element('.pr-target-info')
1054 1056 assert target.text.strip() == 'tag:target'
1055 1057 assert target.getparent().attrib.get('href') is None
1056 1058
1057 1059 @pytest.mark.parametrize('mergeable', [True, False])
1058 1060 def test_shadow_repository_link(
1059 1061 self, mergeable, pr_util, http_host_only_stub):
1060 1062 """
1061 1063 Check that the pull request summary page displays a link to the shadow
1062 1064 repository if the pull request is mergeable. If it is not mergeable
1063 1065 the link should not be displayed.
1064 1066 """
1065 1067 pull_request = pr_util.create_pull_request(
1066 1068 mergeable=mergeable, enable_notifications=False)
1067 1069 target_repo = pull_request.target_repo.scm_instance()
1068 1070 pr_id = pull_request.pull_request_id
1069 1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071 1073
1072 1074 response = self.app.get(route_path(
1073 1075 'pullrequest_show',
1074 1076 repo_name=target_repo.name,
1075 1077 pull_request_id=pr_id))
1076 1078
1077 1079 if mergeable:
1078 1080 response.assert_response().element_value_contains(
1079 1081 'input.pr-mergeinfo', shadow_url)
1080 1082 response.assert_response().element_value_contains(
1081 1083 'input.pr-mergeinfo ', 'pr-merge')
1082 1084 else:
1083 1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1084 1086
1085 1087
1086 1088 @pytest.mark.usefixtures('app')
1087 1089 @pytest.mark.backends("git", "hg")
1088 1090 class TestPullrequestsControllerDelete(object):
1089 1091 def test_pull_request_delete_button_permissions_admin(
1090 1092 self, autologin_user, user_admin, pr_util):
1091 1093 pull_request = pr_util.create_pull_request(
1092 1094 author=user_admin.username, enable_notifications=False)
1093 1095
1094 1096 response = self.app.get(route_path(
1095 1097 'pullrequest_show',
1096 1098 repo_name=pull_request.target_repo.scm_instance().name,
1097 1099 pull_request_id=pull_request.pull_request_id))
1098 1100
1099 1101 response.mustcontain('id="delete_pullrequest"')
1100 1102 response.mustcontain('Confirm to delete this pull request')
1101 1103
1102 1104 def test_pull_request_delete_button_permissions_owner(
1103 1105 self, autologin_regular_user, user_regular, pr_util):
1104 1106 pull_request = pr_util.create_pull_request(
1105 1107 author=user_regular.username, enable_notifications=False)
1106 1108
1107 1109 response = self.app.get(route_path(
1108 1110 'pullrequest_show',
1109 1111 repo_name=pull_request.target_repo.scm_instance().name,
1110 1112 pull_request_id=pull_request.pull_request_id))
1111 1113
1112 1114 response.mustcontain('id="delete_pullrequest"')
1113 1115 response.mustcontain('Confirm to delete this pull request')
1114 1116
1115 1117 def test_pull_request_delete_button_permissions_forbidden(
1116 1118 self, autologin_regular_user, user_regular, user_admin, pr_util):
1117 1119 pull_request = pr_util.create_pull_request(
1118 1120 author=user_admin.username, enable_notifications=False)
1119 1121
1120 1122 response = self.app.get(route_path(
1121 1123 'pullrequest_show',
1122 1124 repo_name=pull_request.target_repo.scm_instance().name,
1123 1125 pull_request_id=pull_request.pull_request_id))
1124 1126 response.mustcontain(no=['id="delete_pullrequest"'])
1125 1127 response.mustcontain(no=['Confirm to delete this pull request'])
1126 1128
1127 1129 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1128 1130 self, autologin_regular_user, user_regular, user_admin, pr_util,
1129 1131 user_util):
1130 1132
1131 1133 pull_request = pr_util.create_pull_request(
1132 1134 author=user_admin.username, enable_notifications=False)
1133 1135
1134 1136 user_util.grant_user_permission_to_repo(
1135 1137 pull_request.target_repo, user_regular,
1136 1138 'repository.write')
1137 1139
1138 1140 response = self.app.get(route_path(
1139 1141 'pullrequest_show',
1140 1142 repo_name=pull_request.target_repo.scm_instance().name,
1141 1143 pull_request_id=pull_request.pull_request_id))
1142 1144
1143 1145 response.mustcontain('id="open_edit_pullrequest"')
1144 1146 response.mustcontain('id="delete_pullrequest"')
1145 1147 response.mustcontain(no=['Confirm to delete this pull request'])
1146 1148
1147 1149 def test_delete_comment_returns_404_if_comment_does_not_exist(
1148 1150 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1149 1151
1150 1152 pull_request = pr_util.create_pull_request(
1151 1153 author=user_admin.username, enable_notifications=False)
1152 1154
1153 1155 self.app.post(
1154 1156 route_path(
1155 1157 'pullrequest_comment_delete',
1156 1158 repo_name=pull_request.target_repo.scm_instance().name,
1157 1159 pull_request_id=pull_request.pull_request_id,
1158 1160 comment_id=1024404),
1159 1161 extra_environ=xhr_header,
1160 1162 params={'csrf_token': csrf_token},
1161 1163 status=404
1162 1164 )
1163 1165
1164 1166 def test_delete_comment(
1165 1167 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1166 1168
1167 1169 pull_request = pr_util.create_pull_request(
1168 1170 author=user_admin.username, enable_notifications=False)
1169 1171 comment = pr_util.create_comment()
1170 1172 comment_id = comment.comment_id
1171 1173
1172 1174 response = self.app.post(
1173 1175 route_path(
1174 1176 'pullrequest_comment_delete',
1175 1177 repo_name=pull_request.target_repo.scm_instance().name,
1176 1178 pull_request_id=pull_request.pull_request_id,
1177 1179 comment_id=comment_id),
1178 1180 extra_environ=xhr_header,
1179 1181 params={'csrf_token': csrf_token},
1180 1182 status=200
1181 1183 )
1182 1184 assert response.body == 'true'
1183 1185
1184 1186 @pytest.mark.parametrize('url_type', [
1185 1187 'pullrequest_new',
1186 1188 'pullrequest_create',
1187 1189 'pullrequest_update',
1188 1190 'pullrequest_merge',
1189 1191 ])
1190 1192 def test_pull_request_is_forbidden_on_archived_repo(
1191 1193 self, autologin_user, backend, xhr_header, user_util, url_type):
1192 1194
1193 1195 # create a temporary repo
1194 1196 source = user_util.create_repo(repo_type=backend.alias)
1195 1197 repo_name = source.repo_name
1196 1198 repo = Repository.get_by_repo_name(repo_name)
1197 1199 repo.archived = True
1198 1200 Session().commit()
1199 1201
1200 1202 response = self.app.get(
1201 1203 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1202 1204
1203 1205 msg = 'Action not supported for archived repository.'
1204 1206 assert_session_flash(response, msg)
1205 1207
1206 1208
1207 1209 def assert_pull_request_status(pull_request, expected_status):
1208 1210 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1209 1211 assert status == expected_status
1210 1212
1211 1213
1212 1214 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1213 1215 @pytest.mark.usefixtures("autologin_user")
1214 1216 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1215 1217 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,1493 +1,1506 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 44 RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 48 ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat., we use for OLD PRs a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.pull_request_state,
112 112 pr.work_in_progress, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render('pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'state': pr.pull_request_state,
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 @LoginRequired()
142 142 @HasRepoPermissionAnyDecorator(
143 143 'repository.read', 'repository.write', 'repository.admin')
144 144 @view_config(
145 145 route_name='pullrequest_show_all', request_method='GET',
146 146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 147 def pull_request_list(self):
148 148 c = self.load_default_context()
149 149
150 150 req_get = self.request.GET
151 151 c.source = str2bool(req_get.get('source'))
152 152 c.closed = str2bool(req_get.get('closed'))
153 153 c.my = str2bool(req_get.get('my'))
154 154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156 156
157 157 c.active = 'open'
158 158 if c.my:
159 159 c.active = 'my'
160 160 if c.closed:
161 161 c.active = 'closed'
162 162 if c.awaiting_review and not c.source:
163 163 c.active = 'awaiting'
164 164 if c.source and not c.awaiting_review:
165 165 c.active = 'source'
166 166 if c.awaiting_my_review:
167 167 c.active = 'awaiting_my'
168 168
169 169 return self._get_template_context(c)
170 170
171 171 @LoginRequired()
172 172 @HasRepoPermissionAnyDecorator(
173 173 'repository.read', 'repository.write', 'repository.admin')
174 174 @view_config(
175 175 route_name='pullrequest_show_all_data', request_method='GET',
176 176 renderer='json_ext', xhr=True)
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 213 source_ref_id, target_ref_id,
214 214 target_commit, source_commit, diff_limit, file_limit,
215 215 fulldiff, hide_whitespace_changes, diff_context):
216 216
217 217 vcs_diff = PullRequestModel().get_diff(
218 218 source_repo, source_ref_id, target_ref_id,
219 219 hide_whitespace_changes, diff_context)
220 220
221 221 diff_processor = diffs.DiffProcessor(
222 222 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 223 file_limit=file_limit, show_full_diff=fulldiff)
224 224
225 225 _parsed = diff_processor.prepare()
226 226
227 227 diffset = codeblocks.DiffSet(
228 228 repo_name=self.db_repo_name,
229 229 source_repo_name=source_repo_name,
230 230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 232 )
233 233 diffset = self.path_filter.render_patchset_filtered(
234 234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235 235
236 236 return diffset
237 237
238 238 def _get_range_diffset(self, source_scm, source_repo,
239 239 commit1, commit2, diff_limit, file_limit,
240 240 fulldiff, hide_whitespace_changes, diff_context):
241 241 vcs_diff = source_scm.get_diff(
242 242 commit1, commit2,
243 243 ignore_whitespace=hide_whitespace_changes,
244 244 context=diff_context)
245 245
246 246 diff_processor = diffs.DiffProcessor(
247 247 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 248 file_limit=file_limit, show_full_diff=fulldiff)
249 249
250 250 _parsed = diff_processor.prepare()
251 251
252 252 diffset = codeblocks.DiffSet(
253 253 repo_name=source_repo.repo_name,
254 254 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 255 target_node_getter=codeblocks.diffset_node_getter(commit2))
256 256
257 257 diffset = self.path_filter.render_patchset_filtered(
258 258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259 259
260 260 return diffset
261 261
262 262 @LoginRequired()
263 263 @HasRepoPermissionAnyDecorator(
264 264 'repository.read', 'repository.write', 'repository.admin')
265 265 @view_config(
266 266 route_name='pullrequest_show', request_method='GET',
267 267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 268 def pull_request_show(self):
269 269 _ = self.request.translate
270 270 c = self.load_default_context()
271 271
272 272 pull_request = PullRequest.get_or_404(
273 273 self.request.matchdict['pull_request_id'])
274 274 pull_request_id = pull_request.pull_request_id
275 275
276 276 c.state_progressing = pull_request.is_state_changing()
277 277
278 278 _new_state = {
279 279 'created': PullRequest.STATE_CREATED,
280 280 }.get(self.request.GET.get('force_state'))
281 281 if c.is_super_admin and _new_state:
282 282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 283 h.flash(
284 284 _('Pull Request state was force changed to `{}`').format(_new_state),
285 285 category='success')
286 286 Session().commit()
287 287
288 288 raise HTTPFound(h.route_path(
289 289 'pullrequest_show', repo_name=self.db_repo_name,
290 290 pull_request_id=pull_request_id))
291 291
292 292 version = self.request.GET.get('version')
293 293 from_version = self.request.GET.get('from_version') or version
294 294 merge_checks = self.request.GET.get('merge_checks')
295 295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296 296
297 297 # fetch global flags of ignore ws or context lines
298 298 diff_context = diffs.get_diff_context(self.request)
299 299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300 300
301 301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302 302
303 303 (pull_request_latest,
304 304 pull_request_at_ver,
305 305 pull_request_display_obj,
306 306 at_version) = PullRequestModel().get_pr_version(
307 307 pull_request_id, version=version)
308 308 pr_closed = pull_request_latest.is_closed()
309 309
310 310 if pr_closed and (version or from_version):
311 311 # not allow to browse versions
312 312 raise HTTPFound(h.route_path(
313 313 'pullrequest_show', repo_name=self.db_repo_name,
314 314 pull_request_id=pull_request_id))
315 315
316 316 versions = pull_request_display_obj.versions()
317 317 # used to store per-commit range diffs
318 318 c.changes = collections.OrderedDict()
319 319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320 320
321 321 c.at_version = at_version
322 322 c.at_version_num = (at_version
323 323 if at_version and at_version != 'latest'
324 324 else None)
325 325 c.at_version_pos = ChangesetComment.get_index_from_version(
326 326 c.at_version_num, versions)
327 327
328 328 (prev_pull_request_latest,
329 329 prev_pull_request_at_ver,
330 330 prev_pull_request_display_obj,
331 331 prev_at_version) = PullRequestModel().get_pr_version(
332 332 pull_request_id, version=from_version)
333 333
334 334 c.from_version = prev_at_version
335 335 c.from_version_num = (prev_at_version
336 336 if prev_at_version and prev_at_version != 'latest'
337 337 else None)
338 338 c.from_version_pos = ChangesetComment.get_index_from_version(
339 339 c.from_version_num, versions)
340 340
341 341 # define if we're in COMPARE mode or VIEW at version mode
342 342 compare = at_version != prev_at_version
343 343
344 344 # pull_requests repo_name we opened it against
345 345 # ie. target_repo must match
346 346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 347 raise HTTPNotFound()
348 348
349 349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 350 pull_request_at_ver)
351 351
352 352 c.pull_request = pull_request_display_obj
353 353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 354 c.pull_request_latest = pull_request_latest
355 355
356 356 if compare or (at_version and not at_version == 'latest'):
357 357 c.allowed_to_change_status = False
358 358 c.allowed_to_update = False
359 359 c.allowed_to_merge = False
360 360 c.allowed_to_delete = False
361 361 c.allowed_to_comment = False
362 362 c.allowed_to_close = False
363 363 else:
364 364 can_change_status = PullRequestModel().check_user_change_status(
365 365 pull_request_at_ver, self._rhodecode_user)
366 366 c.allowed_to_change_status = can_change_status and not pr_closed
367 367
368 368 c.allowed_to_update = PullRequestModel().check_user_update(
369 369 pull_request_latest, self._rhodecode_user) and not pr_closed
370 370 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 371 pull_request_latest, self._rhodecode_user) and not pr_closed
372 372 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 373 pull_request_latest, self._rhodecode_user) and not pr_closed
374 374 c.allowed_to_comment = not pr_closed
375 375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376 376
377 377 c.forbid_adding_reviewers = False
378 378 c.forbid_author_to_review = False
379 379 c.forbid_commit_author_to_review = False
380 380
381 381 if pull_request_latest.reviewer_data and \
382 382 'rules' in pull_request_latest.reviewer_data:
383 383 rules = pull_request_latest.reviewer_data['rules'] or {}
384 384 try:
385 385 c.forbid_adding_reviewers = rules.get(
386 386 'forbid_adding_reviewers')
387 387 c.forbid_author_to_review = rules.get(
388 388 'forbid_author_to_review')
389 389 c.forbid_commit_author_to_review = rules.get(
390 390 'forbid_commit_author_to_review')
391 391 except Exception:
392 392 pass
393 393
394 394 # check merge capabilities
395 395 _merge_check = MergeCheck.validate(
396 396 pull_request_latest, auth_user=self._rhodecode_user,
397 397 translator=self.request.translate,
398 398 force_shadow_repo_refresh=force_refresh)
399
399 400 c.pr_merge_errors = _merge_check.error_details
400 401 c.pr_merge_possible = not _merge_check.failed
401 402 c.pr_merge_message = _merge_check.merge_msg
402 403
403 404 c.pr_merge_info = MergeCheck.get_merge_conditions(
404 405 pull_request_latest, translator=self.request.translate)
405 406
406 407 c.pull_request_review_status = _merge_check.review_status
407 408 if merge_checks:
408 409 self.request.override_renderer = \
409 410 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
410 411 return self._get_template_context(c)
411 412
412 413 comments_model = CommentsModel()
413 414
414 415 # reviewers and statuses
415 416 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
416 417 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
417 418
418 419 # GENERAL COMMENTS with versions #
419 420 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
420 421 q = q.order_by(ChangesetComment.comment_id.asc())
421 422 general_comments = q
422 423
423 424 # pick comments we want to render at current version
424 425 c.comment_versions = comments_model.aggregate_comments(
425 426 general_comments, versions, c.at_version_num)
426 427 c.comments = c.comment_versions[c.at_version_num]['until']
427 428
428 429 # INLINE COMMENTS with versions #
429 430 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
430 431 q = q.order_by(ChangesetComment.comment_id.asc())
431 432 inline_comments = q
432 433
433 434 c.inline_versions = comments_model.aggregate_comments(
434 435 inline_comments, versions, c.at_version_num, inline=True)
435 436
436 437 # TODOs
437 438 c.unresolved_comments = CommentsModel() \
438 439 .get_pull_request_unresolved_todos(pull_request)
439 440 c.resolved_comments = CommentsModel() \
440 441 .get_pull_request_resolved_todos(pull_request)
441 442
442 443 # inject latest version
443 444 latest_ver = PullRequest.get_pr_display_object(
444 445 pull_request_latest, pull_request_latest)
445 446
446 447 c.versions = versions + [latest_ver]
447 448
448 449 # if we use version, then do not show later comments
449 450 # than current version
450 451 display_inline_comments = collections.defaultdict(
451 452 lambda: collections.defaultdict(list))
452 453 for co in inline_comments:
453 454 if c.at_version_num:
454 455 # pick comments that are at least UPTO given version, so we
455 456 # don't render comments for higher version
456 457 should_render = co.pull_request_version_id and \
457 458 co.pull_request_version_id <= c.at_version_num
458 459 else:
459 460 # showing all, for 'latest'
460 461 should_render = True
461 462
462 463 if should_render:
463 464 display_inline_comments[co.f_path][co.line_no].append(co)
464 465
465 466 # load diff data into template context, if we use compare mode then
466 467 # diff is calculated based on changes between versions of PR
467 468
468 469 source_repo = pull_request_at_ver.source_repo
469 470 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
470 471
471 472 target_repo = pull_request_at_ver.target_repo
472 473 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
473 474
474 475 if compare:
475 476 # in compare switch the diff base to latest commit from prev version
476 477 target_ref_id = prev_pull_request_display_obj.revisions[0]
477 478
478 479 # despite opening commits for bookmarks/branches/tags, we always
479 480 # convert this to rev to prevent changes after bookmark or branch change
480 481 c.source_ref_type = 'rev'
481 482 c.source_ref = source_ref_id
482 483
483 484 c.target_ref_type = 'rev'
484 485 c.target_ref = target_ref_id
485 486
486 487 c.source_repo = source_repo
487 488 c.target_repo = target_repo
488 489
489 490 c.commit_ranges = []
490 491 source_commit = EmptyCommit()
491 492 target_commit = EmptyCommit()
492 493 c.missing_requirements = False
493 494
494 495 source_scm = source_repo.scm_instance()
495 496 target_scm = target_repo.scm_instance()
496 497
497 498 shadow_scm = None
498 499 try:
499 500 shadow_scm = pull_request_latest.get_shadow_repo()
500 501 except Exception:
501 502 log.debug('Failed to get shadow repo', exc_info=True)
502 503 # try first the existing source_repo, and then shadow
503 504 # repo if we can obtain one
504 505 commits_source_repo = source_scm
505 506 if shadow_scm:
506 507 commits_source_repo = shadow_scm
507 508
508 509 c.commits_source_repo = commits_source_repo
509 510 c.ancestor = None # set it to None, to hide it from PR view
510 511
511 512 # empty version means latest, so we keep this to prevent
512 513 # double caching
513 514 version_normalized = version or 'latest'
514 515 from_version_normalized = from_version or 'latest'
515 516
516 517 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
517 518 cache_file_path = diff_cache_exist(
518 519 cache_path, 'pull_request', pull_request_id, version_normalized,
519 520 from_version_normalized, source_ref_id, target_ref_id,
520 521 hide_whitespace_changes, diff_context, c.fulldiff)
521 522
522 523 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
523 524 force_recache = self.get_recache_flag()
524 525
525 526 cached_diff = None
526 527 if caching_enabled:
527 528 cached_diff = load_cached_diff(cache_file_path)
528 529
529 530 has_proper_commit_cache = (
530 531 cached_diff and cached_diff.get('commits')
531 532 and len(cached_diff.get('commits', [])) == 5
532 533 and cached_diff.get('commits')[0]
533 534 and cached_diff.get('commits')[3])
534 535
535 536 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
536 537 diff_commit_cache = \
537 538 (ancestor_commit, commit_cache, missing_requirements,
538 539 source_commit, target_commit) = cached_diff['commits']
539 540 else:
541 # NOTE(marcink): we reach potentially unreachable errors when a PR has
542 # merge errors resulting in potentially hidden commits in the shadow repo.
543 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
544 and _merge_check.merge_response
545 maybe_unreachable = maybe_unreachable \
546 and _merge_check.merge_response.metadata.get('unresolved_files')
547 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
540 548 diff_commit_cache = \
541 549 (ancestor_commit, commit_cache, missing_requirements,
542 550 source_commit, target_commit) = self.get_commits(
543 551 commits_source_repo,
544 552 pull_request_at_ver,
545 553 source_commit,
546 554 source_ref_id,
547 555 source_scm,
548 556 target_commit,
549 557 target_ref_id,
550 target_scm)
558 target_scm, maybe_unreachable=maybe_unreachable)
551 559
552 560 # register our commit range
553 561 for comm in commit_cache.values():
554 562 c.commit_ranges.append(comm)
555 563
556 564 c.missing_requirements = missing_requirements
557 565 c.ancestor_commit = ancestor_commit
558 566 c.statuses = source_repo.statuses(
559 567 [x.raw_id for x in c.commit_ranges])
560 568
561 569 # auto collapse if we have more than limit
562 570 collapse_limit = diffs.DiffProcessor._collapse_commits_over
563 571 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
564 572 c.compare_mode = compare
565 573
566 574 # diff_limit is the old behavior, will cut off the whole diff
567 575 # if the limit is applied otherwise will just hide the
568 576 # big files from the front-end
569 577 diff_limit = c.visual.cut_off_limit_diff
570 578 file_limit = c.visual.cut_off_limit_file
571 579
572 580 c.missing_commits = False
573 581 if (c.missing_requirements
574 582 or isinstance(source_commit, EmptyCommit)
575 583 or source_commit == target_commit):
576 584
577 585 c.missing_commits = True
578 586 else:
579 587 c.inline_comments = display_inline_comments
580 588
581 589 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
582 590 if not force_recache and has_proper_diff_cache:
583 591 c.diffset = cached_diff['diff']
584 592 (ancestor_commit, commit_cache, missing_requirements,
585 593 source_commit, target_commit) = cached_diff['commits']
586 594 else:
587 595 c.diffset = self._get_diffset(
588 596 c.source_repo.repo_name, commits_source_repo,
589 597 source_ref_id, target_ref_id,
590 598 target_commit, source_commit,
591 599 diff_limit, file_limit, c.fulldiff,
592 600 hide_whitespace_changes, diff_context)
593 601
594 602 # save cached diff
595 603 if caching_enabled:
596 604 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
597 605
598 606 c.limited_diff = c.diffset.limited_diff
599 607
600 608 # calculate removed files that are bound to comments
601 609 comment_deleted_files = [
602 610 fname for fname in display_inline_comments
603 611 if fname not in c.diffset.file_stats]
604 612
605 613 c.deleted_files_comments = collections.defaultdict(dict)
606 614 for fname, per_line_comments in display_inline_comments.items():
607 615 if fname in comment_deleted_files:
608 616 c.deleted_files_comments[fname]['stats'] = 0
609 617 c.deleted_files_comments[fname]['comments'] = list()
610 618 for lno, comments in per_line_comments.items():
611 619 c.deleted_files_comments[fname]['comments'].extend(comments)
612 620
613 621 # maybe calculate the range diff
614 622 if c.range_diff_on:
615 623 # TODO(marcink): set whitespace/context
616 624 context_lcl = 3
617 625 ign_whitespace_lcl = False
618 626
619 627 for commit in c.commit_ranges:
620 628 commit2 = commit
621 629 commit1 = commit.first_parent
622 630
623 631 range_diff_cache_file_path = diff_cache_exist(
624 632 cache_path, 'diff', commit.raw_id,
625 633 ign_whitespace_lcl, context_lcl, c.fulldiff)
626 634
627 635 cached_diff = None
628 636 if caching_enabled:
629 637 cached_diff = load_cached_diff(range_diff_cache_file_path)
630 638
631 639 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
632 640 if not force_recache and has_proper_diff_cache:
633 641 diffset = cached_diff['diff']
634 642 else:
635 643 diffset = self._get_range_diffset(
636 644 commits_source_repo, source_repo,
637 645 commit1, commit2, diff_limit, file_limit,
638 646 c.fulldiff, ign_whitespace_lcl, context_lcl
639 647 )
640 648
641 649 # save cached diff
642 650 if caching_enabled:
643 651 cache_diff(range_diff_cache_file_path, diffset, None)
644 652
645 653 c.changes[commit.raw_id] = diffset
646 654
647 655 # this is a hack to properly display links, when creating PR, the
648 656 # compare view and others uses different notation, and
649 657 # compare_commits.mako renders links based on the target_repo.
650 658 # We need to swap that here to generate it properly on the html side
651 659 c.target_repo = c.source_repo
652 660
653 661 c.commit_statuses = ChangesetStatus.STATUSES
654 662
655 663 c.show_version_changes = not pr_closed
656 664 if c.show_version_changes:
657 665 cur_obj = pull_request_at_ver
658 666 prev_obj = prev_pull_request_at_ver
659 667
660 668 old_commit_ids = prev_obj.revisions
661 669 new_commit_ids = cur_obj.revisions
662 670 commit_changes = PullRequestModel()._calculate_commit_id_changes(
663 671 old_commit_ids, new_commit_ids)
664 672 c.commit_changes_summary = commit_changes
665 673
666 674 # calculate the diff for commits between versions
667 675 c.commit_changes = []
668 676 mark = lambda cs, fw: list(
669 677 h.itertools.izip_longest([], cs, fillvalue=fw))
670 678 for c_type, raw_id in mark(commit_changes.added, 'a') \
671 679 + mark(commit_changes.removed, 'r') \
672 680 + mark(commit_changes.common, 'c'):
673 681
674 682 if raw_id in commit_cache:
675 683 commit = commit_cache[raw_id]
676 684 else:
677 685 try:
678 686 commit = commits_source_repo.get_commit(raw_id)
679 687 except CommitDoesNotExistError:
680 688 # in case we fail extracting still use "dummy" commit
681 689 # for display in commit diff
682 690 commit = h.AttributeDict(
683 691 {'raw_id': raw_id,
684 692 'message': 'EMPTY or MISSING COMMIT'})
685 693 c.commit_changes.append([c_type, commit])
686 694
687 695 # current user review statuses for each version
688 696 c.review_versions = {}
689 697 if self._rhodecode_user.user_id in allowed_reviewers:
690 698 for co in general_comments:
691 699 if co.author.user_id == self._rhodecode_user.user_id:
692 700 status = co.status_change
693 701 if status:
694 702 _ver_pr = status[0].comment.pull_request_version_id
695 703 c.review_versions[_ver_pr] = status[0]
696 704
697 705 return self._get_template_context(c)
698 706
699 707 def get_commits(
700 708 self, commits_source_repo, pull_request_at_ver, source_commit,
701 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
709 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
710 maybe_unreachable=False):
711
702 712 commit_cache = collections.OrderedDict()
703 713 missing_requirements = False
714
704 715 try:
705 716 pre_load = ["author", "date", "message", "branch", "parents"]
706 show_revs = pull_request_at_ver.revisions
707 for rev in show_revs:
708 comm = commits_source_repo.get_commit(
709 commit_id=rev, pre_load=pre_load)
717
718 pull_request_commits = pull_request_at_ver.revisions
719 log.debug('Loading %s commits from %s',
720 len(pull_request_commits), commits_source_repo)
721
722 for rev in pull_request_commits:
723 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
724 maybe_unreachable=maybe_unreachable)
710 725 commit_cache[comm.raw_id] = comm
711 726
712 727 # Order here matters, we first need to get target, and then
713 728 # the source
714 729 target_commit = commits_source_repo.get_commit(
715 730 commit_id=safe_str(target_ref_id))
716 731
717 732 source_commit = commits_source_repo.get_commit(
718 commit_id=safe_str(source_ref_id))
733 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
719 734 except CommitDoesNotExistError:
720 log.warning(
721 'Failed to get commit from `{}` repo'.format(
722 commits_source_repo), exc_info=True)
735 log.warning('Failed to get commit from `{}` repo'.format(
736 commits_source_repo), exc_info=True)
723 737 except RepositoryRequirementError:
724 log.warning(
725 'Failed to get all required data from repo', exc_info=True)
738 log.warning('Failed to get all required data from repo', exc_info=True)
726 739 missing_requirements = True
727 740 ancestor_commit = None
728 741 try:
729 742 ancestor_id = source_scm.get_common_ancestor(
730 743 source_commit.raw_id, target_commit.raw_id, target_scm)
731 744 ancestor_commit = source_scm.get_commit(ancestor_id)
732 745 except Exception:
733 746 ancestor_commit = None
734 747 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
735 748
736 749 def assure_not_empty_repo(self):
737 750 _ = self.request.translate
738 751
739 752 try:
740 753 self.db_repo.scm_instance().get_commit()
741 754 except EmptyRepositoryError:
742 755 h.flash(h.literal(_('There are no commits yet')),
743 756 category='warning')
744 757 raise HTTPFound(
745 758 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
746 759
747 760 @LoginRequired()
748 761 @NotAnonymous()
749 762 @HasRepoPermissionAnyDecorator(
750 763 'repository.read', 'repository.write', 'repository.admin')
751 764 @view_config(
752 765 route_name='pullrequest_new', request_method='GET',
753 766 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
754 767 def pull_request_new(self):
755 768 _ = self.request.translate
756 769 c = self.load_default_context()
757 770
758 771 self.assure_not_empty_repo()
759 772 source_repo = self.db_repo
760 773
761 774 commit_id = self.request.GET.get('commit')
762 775 branch_ref = self.request.GET.get('branch')
763 776 bookmark_ref = self.request.GET.get('bookmark')
764 777
765 778 try:
766 779 source_repo_data = PullRequestModel().generate_repo_data(
767 780 source_repo, commit_id=commit_id,
768 781 branch=branch_ref, bookmark=bookmark_ref,
769 782 translator=self.request.translate)
770 783 except CommitDoesNotExistError as e:
771 784 log.exception(e)
772 785 h.flash(_('Commit does not exist'), 'error')
773 786 raise HTTPFound(
774 787 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
775 788
776 789 default_target_repo = source_repo
777 790
778 791 if source_repo.parent and c.has_origin_repo_read_perm:
779 792 parent_vcs_obj = source_repo.parent.scm_instance()
780 793 if parent_vcs_obj and not parent_vcs_obj.is_empty():
781 794 # change default if we have a parent repo
782 795 default_target_repo = source_repo.parent
783 796
784 797 target_repo_data = PullRequestModel().generate_repo_data(
785 798 default_target_repo, translator=self.request.translate)
786 799
787 800 selected_source_ref = source_repo_data['refs']['selected_ref']
788 801 title_source_ref = ''
789 802 if selected_source_ref:
790 803 title_source_ref = selected_source_ref.split(':', 2)[1]
791 804 c.default_title = PullRequestModel().generate_pullrequest_title(
792 805 source=source_repo.repo_name,
793 806 source_ref=title_source_ref,
794 807 target=default_target_repo.repo_name
795 808 )
796 809
797 810 c.default_repo_data = {
798 811 'source_repo_name': source_repo.repo_name,
799 812 'source_refs_json': json.dumps(source_repo_data),
800 813 'target_repo_name': default_target_repo.repo_name,
801 814 'target_refs_json': json.dumps(target_repo_data),
802 815 }
803 816 c.default_source_ref = selected_source_ref
804 817
805 818 return self._get_template_context(c)
806 819
807 820 @LoginRequired()
808 821 @NotAnonymous()
809 822 @HasRepoPermissionAnyDecorator(
810 823 'repository.read', 'repository.write', 'repository.admin')
811 824 @view_config(
812 825 route_name='pullrequest_repo_refs', request_method='GET',
813 826 renderer='json_ext', xhr=True)
814 827 def pull_request_repo_refs(self):
815 828 self.load_default_context()
816 829 target_repo_name = self.request.matchdict['target_repo_name']
817 830 repo = Repository.get_by_repo_name(target_repo_name)
818 831 if not repo:
819 832 raise HTTPNotFound()
820 833
821 834 target_perm = HasRepoPermissionAny(
822 835 'repository.read', 'repository.write', 'repository.admin')(
823 836 target_repo_name)
824 837 if not target_perm:
825 838 raise HTTPNotFound()
826 839
827 840 return PullRequestModel().generate_repo_data(
828 841 repo, translator=self.request.translate)
829 842
830 843 @LoginRequired()
831 844 @NotAnonymous()
832 845 @HasRepoPermissionAnyDecorator(
833 846 'repository.read', 'repository.write', 'repository.admin')
834 847 @view_config(
835 848 route_name='pullrequest_repo_targets', request_method='GET',
836 849 renderer='json_ext', xhr=True)
837 850 def pullrequest_repo_targets(self):
838 851 _ = self.request.translate
839 852 filter_query = self.request.GET.get('query')
840 853
841 854 # get the parents
842 855 parent_target_repos = []
843 856 if self.db_repo.parent:
844 857 parents_query = Repository.query() \
845 858 .order_by(func.length(Repository.repo_name)) \
846 859 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
847 860
848 861 if filter_query:
849 862 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
850 863 parents_query = parents_query.filter(
851 864 Repository.repo_name.ilike(ilike_expression))
852 865 parents = parents_query.limit(20).all()
853 866
854 867 for parent in parents:
855 868 parent_vcs_obj = parent.scm_instance()
856 869 if parent_vcs_obj and not parent_vcs_obj.is_empty():
857 870 parent_target_repos.append(parent)
858 871
859 872 # get other forks, and repo itself
860 873 query = Repository.query() \
861 874 .order_by(func.length(Repository.repo_name)) \
862 875 .filter(
863 876 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
864 877 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
865 878 ) \
866 879 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
867 880
868 881 if filter_query:
869 882 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
870 883 query = query.filter(Repository.repo_name.ilike(ilike_expression))
871 884
872 885 limit = max(20 - len(parent_target_repos), 5) # not less then 5
873 886 target_repos = query.limit(limit).all()
874 887
875 888 all_target_repos = target_repos + parent_target_repos
876 889
877 890 repos = []
878 891 # This checks permissions to the repositories
879 892 for obj in ScmModel().get_repos(all_target_repos):
880 893 repos.append({
881 894 'id': obj['name'],
882 895 'text': obj['name'],
883 896 'type': 'repo',
884 897 'repo_id': obj['dbrepo']['repo_id'],
885 898 'repo_type': obj['dbrepo']['repo_type'],
886 899 'private': obj['dbrepo']['private'],
887 900
888 901 })
889 902
890 903 data = {
891 904 'more': False,
892 905 'results': [{
893 906 'text': _('Repositories'),
894 907 'children': repos
895 908 }] if repos else []
896 909 }
897 910 return data
898 911
899 912 @LoginRequired()
900 913 @NotAnonymous()
901 914 @HasRepoPermissionAnyDecorator(
902 915 'repository.read', 'repository.write', 'repository.admin')
903 916 @CSRFRequired()
904 917 @view_config(
905 918 route_name='pullrequest_create', request_method='POST',
906 919 renderer=None)
907 920 def pull_request_create(self):
908 921 _ = self.request.translate
909 922 self.assure_not_empty_repo()
910 923 self.load_default_context()
911 924
912 925 controls = peppercorn.parse(self.request.POST.items())
913 926
914 927 try:
915 928 form = PullRequestForm(
916 929 self.request.translate, self.db_repo.repo_id)()
917 930 _form = form.to_python(controls)
918 931 except formencode.Invalid as errors:
919 932 if errors.error_dict.get('revisions'):
920 933 msg = 'Revisions: %s' % errors.error_dict['revisions']
921 934 elif errors.error_dict.get('pullrequest_title'):
922 935 msg = errors.error_dict.get('pullrequest_title')
923 936 else:
924 937 msg = _('Error creating pull request: {}').format(errors)
925 938 log.exception(msg)
926 939 h.flash(msg, 'error')
927 940
928 941 # would rather just go back to form ...
929 942 raise HTTPFound(
930 943 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
931 944
932 945 source_repo = _form['source_repo']
933 946 source_ref = _form['source_ref']
934 947 target_repo = _form['target_repo']
935 948 target_ref = _form['target_ref']
936 949 commit_ids = _form['revisions'][::-1]
937 950
938 951 # find the ancestor for this pr
939 952 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
940 953 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
941 954
942 955 if not (source_db_repo or target_db_repo):
943 956 h.flash(_('source_repo or target repo not found'), category='error')
944 957 raise HTTPFound(
945 958 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946 959
947 960 # re-check permissions again here
948 961 # source_repo we must have read permissions
949 962
950 963 source_perm = HasRepoPermissionAny(
951 964 'repository.read', 'repository.write', 'repository.admin')(
952 965 source_db_repo.repo_name)
953 966 if not source_perm:
954 967 msg = _('Not Enough permissions to source repo `{}`.'.format(
955 968 source_db_repo.repo_name))
956 969 h.flash(msg, category='error')
957 970 # copy the args back to redirect
958 971 org_query = self.request.GET.mixed()
959 972 raise HTTPFound(
960 973 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
961 974 _query=org_query))
962 975
963 976 # target repo we must have read permissions, and also later on
964 977 # we want to check branch permissions here
965 978 target_perm = HasRepoPermissionAny(
966 979 'repository.read', 'repository.write', 'repository.admin')(
967 980 target_db_repo.repo_name)
968 981 if not target_perm:
969 982 msg = _('Not Enough permissions to target repo `{}`.'.format(
970 983 target_db_repo.repo_name))
971 984 h.flash(msg, category='error')
972 985 # copy the args back to redirect
973 986 org_query = self.request.GET.mixed()
974 987 raise HTTPFound(
975 988 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 989 _query=org_query))
977 990
978 991 source_scm = source_db_repo.scm_instance()
979 992 target_scm = target_db_repo.scm_instance()
980 993
981 994 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
982 995 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
983 996
984 997 ancestor = source_scm.get_common_ancestor(
985 998 source_commit.raw_id, target_commit.raw_id, target_scm)
986 999
987 1000 # recalculate target ref based on ancestor
988 1001 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
989 1002 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
990 1003
991 1004 get_default_reviewers_data, validate_default_reviewers = \
992 1005 PullRequestModel().get_reviewer_functions()
993 1006
994 1007 # recalculate reviewers logic, to make sure we can validate this
995 1008 reviewer_rules = get_default_reviewers_data(
996 1009 self._rhodecode_db_user, source_db_repo,
997 1010 source_commit, target_db_repo, target_commit)
998 1011
999 1012 given_reviewers = _form['review_members']
1000 1013 reviewers = validate_default_reviewers(
1001 1014 given_reviewers, reviewer_rules)
1002 1015
1003 1016 pullrequest_title = _form['pullrequest_title']
1004 1017 title_source_ref = source_ref.split(':', 2)[1]
1005 1018 if not pullrequest_title:
1006 1019 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1007 1020 source=source_repo,
1008 1021 source_ref=title_source_ref,
1009 1022 target=target_repo
1010 1023 )
1011 1024
1012 1025 description = _form['pullrequest_desc']
1013 1026 description_renderer = _form['description_renderer']
1014 1027
1015 1028 try:
1016 1029 pull_request = PullRequestModel().create(
1017 1030 created_by=self._rhodecode_user.user_id,
1018 1031 source_repo=source_repo,
1019 1032 source_ref=source_ref,
1020 1033 target_repo=target_repo,
1021 1034 target_ref=target_ref,
1022 1035 revisions=commit_ids,
1023 1036 reviewers=reviewers,
1024 1037 title=pullrequest_title,
1025 1038 description=description,
1026 1039 description_renderer=description_renderer,
1027 1040 reviewer_data=reviewer_rules,
1028 1041 auth_user=self._rhodecode_user
1029 1042 )
1030 1043 Session().commit()
1031 1044
1032 1045 h.flash(_('Successfully opened new pull request'),
1033 1046 category='success')
1034 1047 except Exception:
1035 1048 msg = _('Error occurred during creation of this pull request.')
1036 1049 log.exception(msg)
1037 1050 h.flash(msg, category='error')
1038 1051
1039 1052 # copy the args back to redirect
1040 1053 org_query = self.request.GET.mixed()
1041 1054 raise HTTPFound(
1042 1055 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1043 1056 _query=org_query))
1044 1057
1045 1058 raise HTTPFound(
1046 1059 h.route_path('pullrequest_show', repo_name=target_repo,
1047 1060 pull_request_id=pull_request.pull_request_id))
1048 1061
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_update', request_method='POST',
        renderer='json_ext')
    def pull_request_update(self):
        """
        Dispatch a POST update of a pull request: reviewer changes,
        a commits refresh, or a title/description edit.

        Returns a JSON dict ``{'response': True, 'redirect_url': ...}``;
        ``redirect_url`` is only set when a forced refresh is requested.
        Raises 403 for non-owners/non-admins, 400 for an unknown action.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        self.load_default_context()
        redirect_url = None

        # closed pull requests are immutable; report via flash, not an error code
        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return {'response': True,
                    'redirect_url': redirect_url}

        is_state_changing = pull_request.is_state_changing()

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)
        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())
            force_refresh = str2bool(self.request.POST.get('force_refresh'))

            if 'review_members' in controls:
                self._update_reviewers(
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                # refuse a commits update while another state transition is
                # in flight, to avoid concurrent modification of the PR
                if is_state_changing:
                    log.debug('commits update: forbidden because pull request is in state %s',
                              pull_request.pull_request_state)
                    msg = _(u'Cannot update pull requests commits in state other than `{}`. '
                            u'Current state is: `{}`').format(
                        PullRequest.STATE_CREATED, pull_request.pull_request_state)
                    h.flash(msg, category='error')
                    return {'response': True,
                            'redirect_url': redirect_url}

                self._update_commits(pull_request)
                if force_refresh:
                    # client asked for a full page reload with cache bypass
                    redirect_url = h.route_path(
                        'pullrequest_show', repo_name=self.db_repo_name,
                        pull_request_id=pull_request.pull_request_id,
                        _query={"force_refresh": 1})
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                raise HTTPBadRequest()

            return {'response': True,
                    'redirect_url': redirect_url}
        raise HTTPForbidden()
1110 1123
1111 1124 def _edit_pull_request(self, pull_request):
1112 1125 _ = self.request.translate
1113 1126
1114 1127 try:
1115 1128 PullRequestModel().edit(
1116 1129 pull_request,
1117 1130 self.request.POST.get('title'),
1118 1131 self.request.POST.get('description'),
1119 1132 self.request.POST.get('description_renderer'),
1120 1133 self._rhodecode_user)
1121 1134 except ValueError:
1122 1135 msg = _(u'Cannot update closed pull requests.')
1123 1136 h.flash(msg, category='error')
1124 1137 return
1125 1138 else:
1126 1139 Session().commit()
1127 1140
1128 1141 msg = _(u'Pull request title & description updated.')
1129 1142 h.flash(msg, category='success')
1130 1143 return
1131 1144
    def _update_commits(self, pull_request):
        """
        Re-read source/target repositories and update the pull request's
        commit range, flashing a summary of what changed.

        Runs inside a ``STATE_UPDATING`` guard so concurrent updates are
        rejected; on success a live channelstream message prompts open
        viewers to reload the page.
        """
        _ = self.request.translate

        with pull_request.set_state(PullRequest.STATE_UPDATING):
            resp = PullRequestModel().update_commits(
                pull_request, self._rhodecode_db_user)

        if resp.executed:

            # human-readable description of which side(s) moved
            if resp.target_changed and resp.source_changed:
                changed = 'target and source repositories'
            elif resp.target_changed and not resp.source_changed:
                changed = 'target repository'
            elif not resp.target_changed and resp.source_changed:
                changed = 'source repository'
            else:
                changed = 'nothing'

            msg = _(u'Pull request updated to "{source_commit_id}" with '
                    u'{count_added} added, {count_removed} removed commits. '
                    u'Source of changes: {change_source}')
            msg = msg.format(
                source_commit_id=pull_request.source_ref_parts.commit_id,
                count_added=len(resp.changes.added),
                count_removed=len(resp.changes.removed),
                change_source=changed)
            h.flash(msg, category='success')

            # push a live "reload" hint to anyone watching this PR page
            channel = '/repo${}$/pr/{}'.format(
                pull_request.target_repo.repo_name, pull_request.pull_request_id)
            message = msg + (
                ' - <a onclick="window.location.reload()">'
                '<strong>{}</strong></a>'.format(_('Reload page')))
            channelstream.post_message(
                channel, message, self._rhodecode_user.username,
                registry=self.request.registry)
        else:
            # map the failure reason to a canned message; NO_CHANGE and
            # WRONG_REF_TYPE are expected situations, not errors
            msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
            warning_reasons = [
                UpdateFailureReason.NO_CHANGE,
                UpdateFailureReason.WRONG_REF_TYPE,
            ]
            category = 'warning' if resp.reason in warning_reasons else 'error'
            h.flash(msg, category=category)
1176 1189
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_merge', request_method='POST',
        renderer='json_ext')
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        # refuse to merge while another state transition is in progress
        if pull_request.is_state_changing():
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        # run the merge pre-conditions check under the UPDATING state guard
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
        merge_possible = not check.failed

        # surface every individual check failure to the user
        for err_type, error_msg in check.errors:
            h.flash(error_msg, category=err_type)

        if merge_possible:
            log.debug("Pre-conditions checked, trying to merge.")
            extras = vcs_operation_context(
                self.request.environ, repo_name=pull_request.target_repo.repo_name,
                username=self._rhodecode_db_user.username, action='push',
                scm=pull_request.target_repo.repo_type)
            with pull_request.set_state(PullRequest.STATE_UPDATING):
                self._merge_pull_request(
                    pull_request, self._rhodecode_db_user, extras)
        else:
            log.debug("Pre-conditions failed, NOT merging.")

        # always land back on the pull request page
        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))
1235 1248
1236 1249 def _merge_pull_request(self, pull_request, user, extras):
1237 1250 _ = self.request.translate
1238 1251 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1239 1252
1240 1253 if merge_resp.executed:
1241 1254 log.debug("The merge was successful, closing the pull request.")
1242 1255 PullRequestModel().close_pull_request(
1243 1256 pull_request.pull_request_id, user)
1244 1257 Session().commit()
1245 1258 msg = _('Pull request was successfully merged and closed.')
1246 1259 h.flash(msg, category='success')
1247 1260 else:
1248 1261 log.debug(
1249 1262 "The merge was not successful. Merge response: %s", merge_resp)
1250 1263 msg = merge_resp.merge_status_message
1251 1264 h.flash(msg, category='error')
1252 1265
    def _update_reviewers(self, pull_request, review_members, reviewer_rules):
        """
        Validate the submitted reviewer list against the reviewer rules and
        persist it; triggers a ``review_status_change`` hook if the change
        alters the calculated review status.

        On a validation failure the error is flashed and nothing is saved.
        """
        _ = self.request.translate

        # only the validator is used here; the data-getter is discarded
        get_default_reviewers_data, validate_default_reviewers = \
            PullRequestModel().get_reviewer_functions()

        try:
            reviewers = validate_default_reviewers(review_members, reviewer_rules)
        except ValueError as e:
            log.error('Reviewers Validation: {}'.format(e))
            h.flash(e, category='error')
            return

        # snapshot status before the change so we can detect a transition
        old_calculated_status = pull_request.calculated_review_status()
        PullRequestModel().update_reviewers(
            pull_request, reviewers, self._rhodecode_user)
        h.flash(_('Pull request reviewers updated.'), category='success')
        Session().commit()

        # trigger status changed if change in reviewers changes the status
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'review_status_change',
                data={'status': calculated_status})
1278 1291
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_delete(self):
        """
        Delete an open pull request; only allowed for users the model
        accepts (owner/admin) and never for closed pull requests.

        Redirects to the pull request list on success; raises 404 when
        the caller lacks permission.
        """
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        self.load_default_context()

        # closed pull requests can never be deleted, regardless of ownership
        pr_closed = pull_request.is_closed()
        allowed_to_delete = PullRequestModel().check_user_delete(
            pull_request, self._rhodecode_user) and not pr_closed

        # only owner can delete it !
        if allowed_to_delete:
            PullRequestModel().delete(pull_request, self._rhodecode_user)
            Session().commit()
            h.flash(_('Successfully deleted pull request'),
                    category='success')
            raise HTTPFound(h.route_path('pullrequest_show_all',
                                         repo_name=self.db_repo_name))

        log.warning('user %s tried to delete pull request without access',
                    self._rhodecode_user)
        raise HTTPNotFound()
1310 1323
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_create', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_create(self):
        """
        Create a comment on a pull request. Handles three flavours driven
        by the POST payload: a close-with-comment, a plain/inline comment,
        and a comment that also changes the review status.

        Returns a JSON dict with the rendered comment HTML; raises 403
        when the PR is closed or the user may not comment.
        """
        _ = self.request.translate

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        allowed_to_comment = PullRequestModel().check_user_comment(
            pull_request, self._rhodecode_user)
        if not allowed_to_comment:
            log.debug(
                'comment: forbidden because pull request is from forbidden repo')
            raise HTTPForbidden()

        c = self.load_default_context()

        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
        close_pull_request = self.request.POST.get('close_pull_request')

        # the logic here should work like following, if we submit close
        # pr comment, use `close_pull_request_with_comment` function
        # else handle regular comment logic

        if close_pull_request:
            # only owner or admin or person with write permissions
            allowed_to_close = PullRequestModel().check_user_update(
                pull_request, self._rhodecode_user)
            if not allowed_to_close:
                log.debug('comment: forbidden because not allowed to close '
                          'pull request %s', pull_request_id)
                raise HTTPForbidden()

            # This also triggers `review_status_change`
            comment, status = PullRequestModel().close_pull_request_with_comment(
                pull_request, self._rhodecode_user, self.db_repo, message=text,
                auth_user=self._rhodecode_user)
            Session().flush()

            PullRequestModel().trigger_pull_request_hook(
                pull_request, self._rhodecode_user, 'comment',
                data={'comment': comment})

        else:
            # regular comment case, could be inline, or one with status.
            # for that one we check also permissions

            allowed_to_change_status = PullRequestModel().check_user_change_status(
                pull_request, self._rhodecode_user)

            # status-change comments get an auto-generated message when the
            # user supplied no text of their own
            if status and allowed_to_change_status:
                message = (_('Status change %(transition_icon)s %(status)s')
                           % {'transition_icon': '>',
                              'status': ChangesetStatus.get_status_lbl(status)})
                text = text or message

            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_user.user_id,
                pull_request=pull_request,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status and allowed_to_change_status else None),
                status_change_type=(status
                                    if status and allowed_to_change_status else None),
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            if allowed_to_change_status:
                # calculate old status before we change it
                old_calculated_status = pull_request.calculated_review_status()

                # get status if set !
                if status:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_user.user_id,
                        comment,
                        pull_request=pull_request
                    )

                Session().flush()
                # this is somehow required to get access to some relationship
                # loaded on comment
                Session().refresh(comment)

                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'comment',
                    data={'comment': comment})

                # we now calculate the status of pull request, and based on that
                # calculation we set the commits status
                calculated_status = pull_request.calculated_review_status()
                if old_calculated_status != calculated_status:
                    PullRequestModel().trigger_pull_request_hook(
                        pull_request, self._rhodecode_user, 'review_status_change',
                        data={'status': calculated_status})

        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
1444 1457
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_comment_delete', request_method='POST',
        renderer='json_ext')
    def pull_request_comment_delete(self):
        """
        Delete a pull request comment, allowed for super-admins, the
        comment author, and repo admins of the repo the comment lives in.

        Returns ``True`` on success; raises 403 on closed pull requests
        and 404 when the caller lacks permission.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])

        comment = ChangesetComment.get_or_404(
            self.request.matchdict['comment_id'])
        comment_id = comment.comment_id

        if pull_request.is_closed():
            log.debug('comment: forbidden because pull request is closed')
            raise HTTPForbidden()

        # NOTE(review): get_or_404 above already raises when the comment is
        # missing, so this branch looks unreachable — confirm before removing
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        if comment.pull_request.is_closed():
            # don't allow deleting comments on closed pull request
            raise HTTPForbidden()

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = comment.author.user_id == self._rhodecode_user.user_id
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            # snapshot status first: deleting a status-change comment can
            # alter the calculated review status
            old_calculated_status = comment.pull_request.calculated_review_status()
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            calculated_status = comment.pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel().trigger_pull_request_hook(
                    comment.pull_request, self._rhodecode_user, 'review_status_change',
                    data={'status': calculated_status})
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()
@@ -1,1100 +1,1104 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26 import collections
27 27 import datetime
28 28 import dateutil.relativedelta
29 29 import hashlib
30 30 import logging
31 31 import re
32 32 import sys
33 33 import time
34 34 import urllib
35 35 import urlobject
36 36 import uuid
37 37 import getpass
38 38 from functools import update_wrapper, partial
39 39
40 40 import pygments.lexers
41 41 import sqlalchemy
42 42 import sqlalchemy.engine.url
43 43 import sqlalchemy.exc
44 44 import sqlalchemy.sql
45 45 import webob
46 46 import pyramid.threadlocal
47 47 from pyramid import compat
48 48 from pyramid.settings import asbool
49 49
50 50 import rhodecode
51 51 from rhodecode.translation import _, _pluralize
52 52
53 53
def md5(s):
    """Return the hex MD5 digest of byte string *s*."""
    digest = hashlib.md5(s)
    return digest.hexdigest()
56 56
57 57
def md5_safe(s):
    """Return the hex MD5 digest of *s*, coercing it to a byte string first."""
    return md5(safe_str(s))
60 60
61 61
def sha1(s):
    """Return the hex SHA-1 digest of byte string *s*."""
    digest = hashlib.sha1(s)
    return digest.hexdigest()
64 64
65 65
def sha1_safe(s):
    """Return the hex SHA-1 digest of *s*, coercing it to a byte string first."""
    return sha1(safe_str(s))
68 68
69 69
def __get_lem(extra_mapping=None):
    """
    Get language extension map based on what's inside pygments lexers

    Builds a dict mapping a lowercased file extension to the list of
    lexer names that claim it, then merges *extra_mapping* entries for
    extensions not already known.

    NOTE: relies on Python 2 semantics — ``map`` returning a list and the
    builtin ``reduce``.
    """
    d = collections.defaultdict(lambda: [])

    def __clean(s):
        # pygments filename patterns look like '*.py' or '*.py[x3]';
        # strip the glob prefix and expand the [..] character class
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            exts = []
            start, stop = s.find('['), s.find(']')

            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lx, t in sorted(pygments.lexers.LEXERS.items()):
        # t[-2] is the tuple of filename patterns for this lexer
        m = map(__clean, t[-2])
        if m:
            # flatten the list of extension lists
            m = reduce(lambda x, y: x + y, m)
            for ext in m:
                desc = lx.replace('Lexer', '')
                d[ext].append(desc)

    data = dict(d)

    extra_mapping = extra_mapping or {}
    if extra_mapping:
        for k, v in extra_mapping.items():
            if k not in data:
                # register new mapping2lexer
                data[k] = [v]

    return data
108 108
109 109
def str2bool(_str):
    """
    returns True/False value from given string, it tries to translate the
    string into boolean

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    # pass actual booleans (and 0/1, which compare equal) straight through
    if _str in (True, False):
        return _str
    normalized = str(_str).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
125 125
126 126
def aslist(obj, sep=None, strip=True):
    """
    Returns given string separated by sep as list

    :param obj: string to split, or an already-list-like/None value
    :param sep: separator passed to ``str.split``
    :param strip: when True, whitespace-strip each element
    """
    if isinstance(obj, (basestring,)):
        parts = obj.split(sep)
        return [part.strip() for part in parts] if strip else parts
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    return [obj]
146 146
147 147
def convert_line_endings(line, mode):
    """
    Converts a given line "line end" accordingly to given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # Unix: collapse CRLF first so lone CRs don't double-convert
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        # Mac: CRLF -> CR, then remaining LF -> CR
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # DOS: turn every bare CR or bare LF into CRLF, leave CRLF alone
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    # unknown mode: return the line untouched
    return line
171 171
172 172
def detect_mode(line, default):
    """
    Detects line break for given line, if line break couldn't be found
    given default value is returned

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
    """
    # CRLF must be tested before its single-char components
    for suffix, mode in (('\r\n', 2), ('\n', 0), ('\r', 1)):
        if line.endswith(suffix):
            return mode
    return default
191 191
192 192
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val: value to convert
    :param default: returned when conversion raises ValueError/TypeError
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
208 208
209 209
def safe_unicode(str_, from_encoding=None, use_chardet=False):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :param from_encoding: single encoding or list of encodings to try; when
        empty, the configured ``default_encoding`` list is used
    :param use_chardet: when True, try chardet detection before the final
        errors-replaced fallback
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        # comma-separated encodings configured for the instance, utf8 default
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # first attempt: plain coercion with the default codec
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # then each configured encoding in order
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    if use_chardet:
        # last resort: let chardet guess; any failure falls back to a
        # lossy decode with replacement characters
        try:
            import chardet
            encoding = chardet.detect(str_)['encoding']
            if encoding is None:
                raise Exception()
            return str_.decode(encoding)
        except (ImportError, UnicodeDecodeError, Exception):
            return unicode(str_, from_encoding[0], 'replace')
    else:
        return unicode(str_, from_encoding[0], 'replace')
254 254
def safe_str(unicode_, to_encoding=None, use_chardet=False):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: single encoding or list of encodings to try; when
        empty, the configured ``default_encoding`` list is used
    :param use_chardet: when True, try chardet detection before the final
        errors-replaced fallback
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(unicode_)['encoding']
            if encoding is None:
                # BUGFIX: ``raise UnicodeEncodeError()`` requires 5
                # constructor arguments — with none it raised TypeError,
                # which escaped the except-clause below instead of falling
                # back. Raise a plain Exception and catch it, mirroring
                # safe_unicode().
                raise Exception()

            return unicode_.encode(encoding)
        except (ImportError, UnicodeEncodeError, Exception):
            return unicode_.encode(to_encoding[0], 'replace')
    else:
        return unicode_.encode(to_encoding[0], 'replace')
300 300
301 301
def remove_suffix(s, suffix):
    """
    Return *s* with a trailing *suffix* removed, if present.

    BUGFIX: with an empty suffix the old ``s[:-len(suffix)]`` slice was
    ``s[:0]`` and wiped the whole string; guard on a truthy suffix so an
    empty suffix is a no-op.
    """
    if suffix and s.endswith(suffix):
        s = s[:-1 * len(suffix)]
    return s
306 306
307 307
def remove_prefix(s, prefix):
    """Return *s* with a leading *prefix* removed, if present."""
    return s[len(prefix):] if s.startswith(prefix) else s
312 312
313 313
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    :returns: a frame object, or None when no matching frame is found
    """

    ignore_modules = ignore_modules or []

    # start two frames up: skip this function and its immediate caller
    f = sys._getframe(2)
    while f.f_back is not None:
        name = f.f_globals.get('__name__')
        # only frames whose module shares our top-level package qualify
        if name and name.startswith(__name__.split('.')[0]):
            if name not in ignore_modules:
                return f
        f = f.f_back
    return None
332 332
333 333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener that validates a pooled
    connection with a cheap ``SELECT 1`` before it is handed out,
    transparently re-connecting (and flushing the pool) on disconnects.

    :param connection: the Connection being checked out
    :param branch: True for a sub-connection of an existing Connection
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
367 367
368 368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions.

    Wraps ``sqlalchemy.engine_from_config`` and optionally wires up:

    - a connection liveness ping (``sqlalchemy.db1.ping_connection``)
    - colored query debug logging (``sqlalchemy.db1.debug_query``)

    Both flags are popped from ``configuration`` before it is handed to
    SQLAlchemy, so they never reach the engine itself.
    """
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap the statement in ANSI escape codes for terminal output
        color_seq = '\033[1;33m' # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # remember when the query started; removed again in the
            # matching after_cursor_execute hook below
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410 410
411 411
def get_encryption_key(config):
    """
    Return the secret used for encrypted values.

    Prefers the dedicated ``rhodecode.encrypted_values.secret`` setting and
    falls back to the (always present) beaker session secret.
    """
    dedicated_secret = config.get('rhodecode.encrypted_values.secret')
    return dedicated_secret or config['beaker.session.secret']
416 416
417 417
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not defined we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: render the 'ago' / 'in' marker around the value
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # calendar-aware difference split into named components
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    # most-significant first; index order matters for the carry fixes below
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        # date lies in the future: swap operands so the delta math below
        # stays positive, and remember to render "in ..." instead of "... ago"
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets: # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # render only the most significant non-zero part and at most the one
    # directly below it (unless show_short_version asked for a single part)
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
572 572
573 573
def age_from_seconds(seconds):
    """
    Short human readable age for a delta expressed in seconds from now.

    :param seconds: positive values render as a future age, negative as past
    """
    delta = safe_int(seconds) or 0
    reference = time_to_datetime(time.time() + delta)
    return age(reference, show_suffix=False, show_short_version=True)
578 578
579 579
def cleaned_uri(uri):
    """
    Percent-quote characters that RFC3986 forbids verbatim inside an uri
    (such as '[' and ']'), leaving '@$:/' untouched.

    :param uri:
    :return: uri with the offending characters quoted
    """
    return urllib.quote(uri, safe='@$:/')
588 588
589 589
def uri_filter(uri):
    """
    Removes user:password from given url string

    :param uri:
    :returns: filtered non-empty parts of the url: [proto, host, port]
    """
    if not uri:
        return ''

    proto = ''
    for scheme in ('https://', 'http://'):
        if uri.startswith(scheme):
            proto = scheme
            uri = uri[len(scheme):]
            break

    # drop everything up to and including the credentials separator;
    # find('@') == -1 leaves the string untouched (slice from 0)
    uri = uri[uri.find('@') + 1:]

    # split off an optional :port suffix
    colon_pos = uri.find(':')
    if colon_pos == -1:
        host, port = uri, None
    else:
        host, port = uri[:colon_pos], uri[colon_pos + 1:]

    return filter(None, [proto, host, port])
620 620
621 621
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """
    parts = uri_filter(uri)
    # re-attach the ':' separator to the port part, when one survived
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]

    return ''.join(parts)
635 635
636 636
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`
    """
    if not request:
        return {}

    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    # keep an eventual proxy-prefix path as part of the netloc
    decoded_path = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    return {
        'scheme': url_obj.scheme,
        'netloc': url_obj.netloc+decoded_path,
        'hostname': url_obj.hostname,
    }
655 655
656 656
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url template for the given repository.

    Template variables ({scheme}, {user}, {netloc}, {repo}, {repoid}, ...)
    are substituted into ``uri_tmpl``; ``override`` entries take precedence
    over the computed defaults.
    """
    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    decoded_path = safe_unicode(urllib.unquote(url_obj.path.rstrip('/')))

    variables = {
        'scheme': url_obj.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': url_obj.netloc+decoded_path,
        'hostname': url_obj.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    variables.update(override)
    variables['user'] = urllib.quote(safe_str(variables['user']))

    for name, value in variables.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % name, value)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    rendered = urlobject.URLObject(uri_tmpl)
    rendered = rendered.with_netloc(rendered.netloc.lstrip('@'))

    return safe_unicode(rendered)
689 689
690 690
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # BUGFIX: the message used logging-style lazy args
        # (Exception('... %s', type(repo))) which never interpolated the
        # offending type into the message; format it explicitly instead.
        raise Exception(
            'You must pass an Repository object as first argument, got %s'
            % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
    except (RepositoryError, LookupError):
        # missing/unknown commit - hand back a harmless placeholder
        commit = EmptyCommit()
    return commit
713 717
714 718
def datetime_to_time(dt):
    """Convert a datetime to a unix timestamp; falsy input yields None."""
    return time.mktime(dt.timetuple()) if dt else None
718 722
719 723
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) to a local datetime.

    Returns None for falsy input or for a string that does not parse.
    """
    if not tm:
        return
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return
    return datetime.datetime.fromtimestamp(tm)
728 732
729 733
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) to a UTC datetime.

    Returns None for falsy input or for a string that does not parse.
    """
    if not tm:
        return
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return
    return datetime.datetime.utcfromtimestamp(tm)
738 742
739 743
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Return the unique usernames @mentioned in ``s``, sorted
    case-insensitively.

    :param s: string to scan for mentions
    """
    unique_names = set(MENTIONS_REGEX.findall(s))
    return sorted(unique_names, key=lambda name: name.lower())
759 763
760 764
class AttributeDictBase(dict):
    """
    Dict subclass exposing keys as attributes (``d.x`` <=> ``d['x']``).

    Pickle support serializes the instance ``__dict__`` so subclasses with
    real attributes survive a round-trip (items are handled by dict itself).
    """

    def __getstate__(self):
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, state):
        # FIX: parameter used to be named `dict`, shadowing the builtin;
        # pickle calls this positionally so the rename is safe.
        self.__dict__ = state

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
771 775
772 776
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, attr):
        if attr in self:
            return self[attr]
        raise AttributeError('%s object has no attribute %s' % (
            self.__class__, attr))
784 788
785 789
class AttributeDict(AttributeDictBase):
    """Attribute-style dict that returns None for missing keys."""

    def __getattr__(self, attr):
        return dict.get(self, attr, None)
789 793
790 794
791 795
class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    # An insertion-ordered dict with defaultdict-style missing-key handling:
    # OrderedDict controls ordering/lookup, defaultdict contributes
    # __missing__ driven by `default_factory`.
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        self.default_factory = default_factory
797 801
798 802
def fix_PATH(os_=None):
    """
    Prepend the directory of the running python interpreter to $PATH so
    subprocess calls resolve the same python version.

    :param os_: optional os-like module to operate on (useful for tests);
        defaults to the real ``os`` module
    """
    if os_ is None:
        import os
    else:
        os = os_

    interpreter_dir = os.path.split(sys.executable)[0]
    # only prepend when it is not already the first PATH entry
    if not os.environ['PATH'].startswith(interpreter_dir):
        os.environ['PATH'] = '%s:%s' % (interpreter_dir, os.environ['PATH'])
812 816
813 817
def obfuscate_url_pw(engine):
    """Return the engine/DSN url as unicode with any password masked."""
    url = engine or ''
    try:
        url = sqlalchemy.engine.url.make_url(engine)
        if url.password:
            url.password = 'XXXXX'
    except Exception:
        # not parseable as an url - fall through with the raw value
        pass
    return unicode(url)
823 827
824 828
def get_server_url(environ):
    """Reconstruct the base server url (host + script name) from a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
828 832
829 833
def unique_id(hexlen=32):
    """
    Random short id drawn from an alphabet without easily-confused
    characters (no 0/O, 1/I etc).
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
833 837
834 838
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, the UUID is derived deterministically
    from it (namespace uuid3); otherwise a random uuid4 is used.

    :param url to get the uuid for
    :truncate_to: truncate the basic 22 UUID to shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    chars = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
    base = len(chars)

    # If no URL is given, generate a random UUID.
    if url is None:
        remainder = uuid.uuid4().int
    else:
        remainder = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # repeated division by the alphabet size, collecting "digits"
    digits = []
    while remainder > 0:
        digits.append(chars[remainder % base])
        remainder = int(remainder / base)
    return "".join(digits)[:truncate_to]
864 868
865 869
def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from the given (or current threadlocal) request.

    Returns the web user, the api rpc_user, or None when neither is set.
    """
    req = request or pyramid.threadlocal.get_current_request()

    # web case
    if req and hasattr(req, 'user'):
        return req.user

    # api case
    if req and hasattr(req, 'rpc_user'):
        return req.rpc_user

    return None
881 885
882 886
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = logger_name + '.' + namespace

    log = logging.getLogger(logger_name)

    # try to attribute the action to a logged-in user; otherwise report
    # an unknown actor at warning level
    user = get_current_rhodecode_user()
    if user:
        logfunc = log.info
    else:
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
913 917
914 918
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Split ``text`` on ``sep`` while honouring backslash-escaped separators,
    e.g. arg='foo\, bar' keeps 'foo, bar' together.

    It should be noted that the way bash et. al. do command line parsing,
    those single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _mid, tail = text.partition(escaped_sep)
    # a regular split is fine before the first escaped separator
    pieces = head.split(sep, maxsplit)
    pending = pieces.pop()

    # recurse because there may be more escaped separators further on
    rest = escape_split(tail, sep, maxsplit)

    # rest[0] is the remainder of the escaped value we cut in half
    pending += sep + rest[0]

    return pieces + [pending] + rest[1:]
940 944
941 945
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        # calling an OAttr yields the instance itself
        return self


# alias
OAttr = OptionalAttr


class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        if isinstance(self.type_, OAttr):
            # use params name
            return self.type_.attr_name
        return self.type_

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
1009 1013
1010 1014
def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.
    """
    n = len(pat)
    res = ''
    i = 0
    while i < n:
        c = pat[i]
        i += 1
        if c == '*':
            # '*' matches anything except the path separator
            res += '[^/]*'
        elif c == '?':
            # '?' matches exactly one non-separator character
            res += '[^/]'
        elif c == '[':
            # scan for the closing bracket of a character class
            j = i
            if j < n and pat[j] == '!':
                j += 1
            if j < n and pat[j] == ']':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res = res + '\\['
            else:
                stuff = pat[i:j].replace('\\', '\\\\')
                i = j + 1
                if stuff[0] == '!':
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            res += re.escape(c)
    return res + r'\Z(?ms)'
1050 1054
1051 1055
def parse_byte_string(size_str):
    """
    Parse a size string like '64MB' or '512kb' into a number of bytes.

    :raises ValueError: when the string does not match <num>(MB|KB)
    """
    match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not match:
        raise ValueError('Given size:%s is invalid, please make sure '
                         'to use format of <num>(MB|KB)' % size_str)

    amount, unit = match.groups()
    multiplier = {'mb': 1024 * 1024, 'kb': 1024}[unit.lower()]
    return long(amount) * multiplier
1061 1065
1062 1066
class CachedProperty(object):
    """
    Lazy attribute descriptor with explicit cache invalidation.

    class Foo():

        @CachedProperty
        def heavy_func():
            return 'super-calculation'

    foo = Foo()
    foo.heavy_func() # first computions
    foo.heavy_func() # fetch from cache
    foo._invalidate_prop_cache('heavy_func')
    # at this point calling foo.heavy_func() will be re-computed
    """

    def __init__(self, func, func_name=None):
        name = func.__name__ if func_name is None else func_name
        self.data = (func, name)
        update_wrapper(self, func)

    def __get__(self, inst, class_):
        # accessed on the class itself -> hand back the descriptor
        if inst is None:
            return self

        func, name = self.data
        # storing the value under the same name shadows this (non-data)
        # descriptor, so subsequent reads bypass __get__ entirely
        result = func(inst)
        inst.__dict__[name] = result
        if '_invalidate_prop_cache' not in inst.__dict__:
            # expose a bound invalidation helper on first computation
            inst.__dict__['_invalidate_prop_cache'] = partial(
                self._invalidate_prop_cache, inst)
        return result

    def _invalidate_prop_cache(self, inst, name):
        # dropping the cached entry re-exposes the descriptor
        inst.__dict__.pop(name, None)
@@ -1,1899 +1,1901 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
# Unix file-mode bits as used by the VCS backends for regular and
# executable files.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755
# all-zero sha marking the "null"/empty commit
EMPTY_COMMIT_ID = '0' * 40

# Lightweight pointer to a commit.
# NOTE(review): `type` is presumably e.g. 'branch'/'tag' - confirm with backends.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    NONE = 0                          # everything went well
    UNKNOWN = 1                       # unexpected exception; check the logs
    MERGE_FAILED = 2                  # merge not successful: conflicts
    PUSH_FAILED = 3                   # merge ok, push to target repo failed
    TARGET_IS_NOT_HEAD = 4            # target is not a head in target repo
    HG_SOURCE_HAS_MORE_BRANCHES = 5   # push would create additional branches
    HG_TARGET_HAS_MULTIPLE_HEADS = 6  # ambiguous target (hg branches only)
    TARGET_IS_LOCKED = 7              # target repository is locked

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    MISSING_TARGET_REF = 9            # target repo reference is missing
    MISSING_SOURCE_REF = 10           # source repo reference is missing

    # merge not successful: conflicts related to sub repositories
    SUBREPO_MERGE_FAILED = 11
115 115
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    NONE = 0                # everything went well
    UNKNOWN = 1             # unexpected exception; check the logs
    NO_CHANGE = 2           # the pull request is already up to date
    WRONG_REF_TYPE = 3      # reference type not supported for update
    MISSING_TARGET_REF = 4  # target reference is missing
    MISSING_SOURCE_REF = 5  # source reference is missing
145 145
class MergeResponse(object):
    """
    Outcome of a server-side merge (simulated or executed).

    Holds whether the merge is possible / was executed, the resulting merge
    reference, a ``MergeFailureReason`` code and a metadata dict used to
    interpolate the user-facing status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # merge_ref and metadata deliberately do not take part in equality
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        # reverse-map the numeric failure reason to its public constant name
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # incomplete metadata for the template - log and fall back to
            # the raw (uninterpolated) message instead of crashing
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        # plain-dict form; includes the rendered merge_status_message
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
231 232
232 233
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    # cached result of the emptiness check; ``None`` means "not checked yet"
    _is_empty = None
    _commit_ids = {}

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        # number of commits in the repository
        return self.count()

    def __eq__(self, other):
        # two repositories of the same backend are equal when they share a path
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so define it explicitly
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        # ensure the cached-diffs directory for this repo exists and return it
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0o755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        """
        Return a ``Config`` object, optionally pre-populated from `default`,
        a list of ``(section, key, value)`` tuples.
        """
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        # handle to the vcsserver remote for this repository; backend-specific
        raise NotImplementedError

    def _heads(self, branch=None):
        # backends that support multiple heads per branch override this
        return []

    @LazyProperty
    def EMPTY_COMMIT(self):
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        # backend alias (e.g. 'git', 'hg', 'svn') resolved from settings.BACKENDS
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        # repository name derived from the last path segment
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        # combined size in bytes of all files at the given commit
        commit = self.get_commit(commit_id)
        return commit.size

    def _check_for_empty(self):
        # a repo with no cached commit ids might still have commits remotely
        no_commits = len(self._commit_ids) == 0
        if no_commits:
            # check on remote to be sure
            return self._remote.is_empty()
        else:
            return False

    def is_empty(self):
        # tests always re-check so fixtures that add commits are seen immediately
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    @CachedProperty
    def commit_ids(self):
        raise NotImplementedError

    def append_commit_id(self, commit_id):
        # register a newly created commit id and refresh the cached state
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.
        :param maybe_unreachable: Optional. NOTE(review): presumably allows
            resolving commits not reachable from any reference (e.g. produced
            in a shadow merge workspace) — confirm semantics in the backend
            implementations.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # lazily yield commits for a slice of commit_ids
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # dry-run merges still need commit identity; fall back to
            # well-known placeholder values
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # a real merge creates a commit, so full identity is mandatory
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never let backend errors escape; report them as an UNKNOWN
            # merge failure so callers always get a MergeResponse
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    @classmethod
    def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
        """
        Legacy version that was used before. We still need it for
        backward compat
        """
        return os.path.join(
            os.path.dirname(repo_path),
            '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))

    @classmethod
    def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
        if os.path.exists(legacy_repository_path):
            return legacy_repository_path
        else:
            return os.path.join(
                os.path.dirname(repo_path),
                '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: fall back to ignore_errors so cleanup never raises
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        # EmptyCommit is allowed regardless of its repository association
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))

    def _validate_commit_idx(self, commit_idx):
        # Python 2: accept both int and long indices
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        # deprecated shim: a "revision" may be either a commit id (string)
        # or a numeric commit index
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        # deprecated shim over get_commits; translates revisions to commit ids
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        # delegate hook installation to the vcsserver remote
        return self._remote.install_hooks(force)

    def get_hooks_info(self):
        return self._remote.get_hooks_info()
827 829
828 830 class BaseCommit(object):
829 831 """
830 832 Each backend should implement it's commit representation.
831 833
832 834 **Attributes**
833 835
834 836 ``repository``
835 837 repository object within which commit exists
836 838
837 839 ``id``
838 840 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
839 841 just ``tip``.
840 842
841 843 ``raw_id``
842 844 raw commit representation (i.e. full 40 length sha for git
843 845 backend)
844 846
845 847 ``short_id``
846 848 shortened (if apply) version of ``raw_id``; it would be simple
847 849 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
848 850 as ``raw_id`` for subversion
849 851
850 852 ``idx``
851 853 commit index
852 854
853 855 ``files``
854 856 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
855 857
856 858 ``dirs``
857 859 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
858 860
859 861 ``nodes``
860 862 combined list of ``Node`` objects
861 863
862 864 ``author``
863 865 author of the commit, as unicode
864 866
865 867 ``message``
866 868 message of the commit, as unicode
867 869
868 870 ``parents``
869 871 list of parent commits
870 872
871 873 """
872 874
873 875 branch = None
874 876 """
875 877 Depending on the backend this should be set to the branch name of the
876 878 commit. Backends not supporting branches on commits should leave this
877 879 value as ``None``.
878 880 """
879 881
880 882 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
881 883 """
882 884 This template is used to generate a default prefix for repository archives
883 885 if no prefix has been specified.
884 886 """
885 887
886 888 def __str__(self):
887 889 return '<%s at %s:%s>' % (
888 890 self.__class__.__name__, self.idx, self.short_id)
889 891
890 892 def __repr__(self):
891 893 return self.__str__()
892 894
893 895 def __unicode__(self):
894 896 return u'%s:%s' % (self.idx, self.short_id)
895 897
896 898 def __eq__(self, other):
897 899 same_instance = isinstance(other, self.__class__)
898 900 return same_instance and self.raw_id == other.raw_id
899 901
900 902 def __json__(self):
901 903 parents = []
902 904 try:
903 905 for parent in self.parents:
904 906 parents.append({'raw_id': parent.raw_id})
905 907 except NotImplementedError:
906 908 # empty commit doesn't have parents implemented
907 909 pass
908 910
909 911 return {
910 912 'short_id': self.short_id,
911 913 'raw_id': self.raw_id,
912 914 'revision': self.idx,
913 915 'message': self.message,
914 916 'date': self.date,
915 917 'author': self.author,
916 918 'parents': parents,
917 919 'branch': self.branch
918 920 }
919 921
920 922 def __getstate__(self):
921 923 d = self.__dict__.copy()
922 924 d.pop('_remote', None)
923 925 d.pop('repository', None)
924 926 return d
925 927
926 928 def _get_refs(self):
927 929 return {
928 930 'branches': [self.branch] if self.branch else [],
929 931 'bookmarks': getattr(self, 'bookmarks', []),
930 932 'tags': self.tags
931 933 }
932 934
933 935 @LazyProperty
934 936 def last(self):
935 937 """
936 938 ``True`` if this is last commit in repository, ``False``
937 939 otherwise; trying to access this attribute while there is no
938 940 commits would raise `EmptyRepositoryError`
939 941 """
940 942 if self.repository is None:
941 943 raise CommitError("Cannot check if it's most recent commit")
942 944 return self.raw_id == self.repository.commit_ids[-1]
943 945
944 946 @LazyProperty
945 947 def parents(self):
946 948 """
947 949 Returns list of parent commits.
948 950 """
949 951 raise NotImplementedError
950 952
951 953 @LazyProperty
952 954 def first_parent(self):
953 955 """
954 956 Returns list of parent commits.
955 957 """
956 958 return self.parents[0] if self.parents else EmptyCommit()
957 959
958 960 @property
959 961 def merge(self):
960 962 """
961 963 Returns boolean if commit is a merge.
962 964 """
963 965 return len(self.parents) > 1
964 966
965 967 @LazyProperty
966 968 def children(self):
967 969 """
968 970 Returns list of child commits.
969 971 """
970 972 raise NotImplementedError
971 973
972 974 @LazyProperty
973 975 def id(self):
974 976 """
975 977 Returns string identifying this commit.
976 978 """
977 979 raise NotImplementedError
978 980
979 981 @LazyProperty
980 982 def raw_id(self):
981 983 """
982 984 Returns raw string identifying this commit.
983 985 """
984 986 raise NotImplementedError
985 987
986 988 @LazyProperty
987 989 def short_id(self):
988 990 """
989 991 Returns shortened version of ``raw_id`` attribute, as string,
990 992 identifying this commit, useful for presentation to users.
991 993 """
992 994 raise NotImplementedError
993 995
994 996 @LazyProperty
995 997 def idx(self):
996 998 """
997 999 Returns integer identifying this commit.
998 1000 """
999 1001 raise NotImplementedError
1000 1002
1001 1003 @LazyProperty
1002 1004 def committer(self):
1003 1005 """
1004 1006 Returns committer for this commit
1005 1007 """
1006 1008 raise NotImplementedError
1007 1009
1008 1010 @LazyProperty
1009 1011 def committer_name(self):
1010 1012 """
1011 1013 Returns committer name for this commit
1012 1014 """
1013 1015
1014 1016 return author_name(self.committer)
1015 1017
1016 1018 @LazyProperty
1017 1019 def committer_email(self):
1018 1020 """
1019 1021 Returns committer email address for this commit
1020 1022 """
1021 1023
1022 1024 return author_email(self.committer)
1023 1025
1024 1026 @LazyProperty
1025 1027 def author(self):
1026 1028 """
1027 1029 Returns author for this commit
1028 1030 """
1029 1031
1030 1032 raise NotImplementedError
1031 1033
1032 1034 @LazyProperty
1033 1035 def author_name(self):
1034 1036 """
1035 1037 Returns author name for this commit
1036 1038 """
1037 1039
1038 1040 return author_name(self.author)
1039 1041
1040 1042 @LazyProperty
1041 1043 def author_email(self):
1042 1044 """
1043 1045 Returns author email address for this commit
1044 1046 """
1045 1047
1046 1048 return author_email(self.author)
1047 1049
1048 1050 def get_file_mode(self, path):
1049 1051 """
1050 1052 Returns stat mode of the file at `path`.
1051 1053 """
1052 1054 raise NotImplementedError
1053 1055
1054 1056 def is_link(self, path):
1055 1057 """
1056 1058 Returns ``True`` if given `path` is a symlink
1057 1059 """
1058 1060 raise NotImplementedError
1059 1061
1060 1062 def is_node_binary(self, path):
1061 1063 """
1062 1064 Returns ``True`` is given path is a binary file
1063 1065 """
1064 1066 raise NotImplementedError
1065 1067
1066 1068 def get_file_content(self, path):
1067 1069 """
1068 1070 Returns content of the file at the given `path`.
1069 1071 """
1070 1072 raise NotImplementedError
1071 1073
1072 1074 def get_file_content_streamed(self, path):
1073 1075 """
1074 1076 returns a streaming response from vcsserver with file content
1075 1077 """
1076 1078 raise NotImplementedError
1077 1079
1078 1080 def get_file_size(self, path):
1079 1081 """
1080 1082 Returns size of the file at the given `path`.
1081 1083 """
1082 1084 raise NotImplementedError
1083 1085
1084 1086 def get_path_commit(self, path, pre_load=None):
1085 1087 """
1086 1088 Returns last commit of the file at the given `path`.
1087 1089
1088 1090 :param pre_load: Optional. List of commit attributes to load.
1089 1091 """
1090 1092 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1091 1093 if not commits:
1092 1094 raise RepositoryError(
1093 1095 'Failed to fetch history for path {}. '
1094 1096 'Please check if such path exists in your repository'.format(
1095 1097 path))
1096 1098 return commits[0]
1097 1099
1098 1100 def get_path_history(self, path, limit=None, pre_load=None):
1099 1101 """
1100 1102 Returns history of file as reversed list of :class:`BaseCommit`
1101 1103 objects for which file at given `path` has been modified.
1102 1104
1103 1105 :param limit: Optional. Allows to limit the size of the returned
1104 1106 history. This is intended as a hint to the underlying backend, so
1105 1107 that it can apply optimizations depending on the limit.
1106 1108 :param pre_load: Optional. List of commit attributes to load.
1107 1109 """
1108 1110 raise NotImplementedError
1109 1111
1110 1112 def get_file_annotate(self, path, pre_load=None):
1111 1113 """
1112 1114 Returns a generator of four element tuples with
1113 1115 lineno, sha, commit lazy loader and line
1114 1116
1115 1117 :param pre_load: Optional. List of commit attributes to load.
1116 1118 """
1117 1119 raise NotImplementedError
1118 1120
1119 1121 def get_nodes(self, path):
1120 1122 """
1121 1123 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1122 1124 state of commit at the given ``path``.
1123 1125
1124 1126 :raises ``CommitError``: if node at the given ``path`` is not
1125 1127 instance of ``DirNode``
1126 1128 """
1127 1129 raise NotImplementedError
1128 1130
1129 1131 def get_node(self, path):
1130 1132 """
1131 1133 Returns ``Node`` object from the given ``path``.
1132 1134
1133 1135 :raises ``NodeDoesNotExistError``: if there is no node at the given
1134 1136 ``path``
1135 1137 """
1136 1138 raise NotImplementedError
1137 1139
1138 1140 def get_largefile_node(self, path):
1139 1141 """
1140 1142 Returns the path to largefile from Mercurial/Git-lfs storage.
1141 1143 or None if it's not a largefile node
1142 1144 """
1143 1145 return None
1144 1146
1145 1147 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1146 1148 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1147 1149 """
1148 1150 Creates an archive containing the contents of the repository.
1149 1151
1150 1152 :param archive_dest_path: path to the file which to create the archive.
1151 1153 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1152 1154 :param prefix: name of root directory in archive.
1153 1155 Default is repository name and commit's short_id joined with dash:
1154 1156 ``"{repo_name}-{short_id}"``.
1155 1157 :param write_metadata: write a metadata file into archive.
1156 1158 :param mtime: custom modification time for archive creation, defaults
1157 1159 to time.time() if not given.
1158 1160 :param archive_at_path: pack files at this path (default '/')
1159 1161
1160 1162 :raise VCSError: If prefix has a problem.
1161 1163 """
1162 1164 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1163 1165 if kind not in allowed_kinds:
1164 1166 raise ImproperArchiveTypeError(
1165 1167 'Archive kind (%s) not supported use one of %s' %
1166 1168 (kind, allowed_kinds))
1167 1169
1168 1170 prefix = self._validate_archive_prefix(prefix)
1169 1171
1170 1172 mtime = mtime is not None or time.mktime(self.date.timetuple())
1171 1173
1172 1174 file_info = []
1173 1175 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1174 1176 for _r, _d, files in cur_rev.walk(archive_at_path):
1175 1177 for f in files:
1176 1178 f_path = os.path.join(prefix, f.path)
1177 1179 file_info.append(
1178 1180 (f_path, f.mode, f.is_link(), f.raw_bytes))
1179 1181
1180 1182 if write_metadata:
1181 1183 metadata = [
1182 1184 ('repo_name', self.repository.name),
1183 1185 ('commit_id', self.raw_id),
1184 1186 ('mtime', mtime),
1185 1187 ('branch', self.branch),
1186 1188 ('tags', ','.join(self.tags)),
1187 1189 ]
1188 1190 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1189 1191 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1190 1192
1191 1193 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1192 1194
1193 1195 def _validate_archive_prefix(self, prefix):
1194 1196 if prefix is None:
1195 1197 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1196 1198 repo_name=safe_str(self.repository.name),
1197 1199 short_id=self.short_id)
1198 1200 elif not isinstance(prefix, str):
1199 1201 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1200 1202 elif prefix.startswith('/'):
1201 1203 raise VCSError("Prefix cannot start with leading slash")
1202 1204 elif prefix.strip() == '':
1203 1205 raise VCSError("Prefix cannot be empty")
1204 1206 return prefix
1205 1207
1206 1208 @LazyProperty
1207 1209 def root(self):
1208 1210 """
1209 1211 Returns ``RootNode`` object for this commit.
1210 1212 """
1211 1213 return self.get_node('')
1212 1214
1213 1215 def next(self, branch=None):
1214 1216 """
1215 1217 Returns next commit from current, if branch is gives it will return
1216 1218 next commit belonging to this branch
1217 1219
1218 1220 :param branch: show commits within the given named branch
1219 1221 """
1220 1222 indexes = xrange(self.idx + 1, self.repository.count())
1221 1223 return self._find_next(indexes, branch)
1222 1224
1223 1225 def prev(self, branch=None):
1224 1226 """
1225 1227 Returns previous commit from current, if branch is gives it will
1226 1228 return previous commit belonging to this branch
1227 1229
1228 1230 :param branch: show commit within the given named branch
1229 1231 """
1230 1232 indexes = xrange(self.idx - 1, -1, -1)
1231 1233 return self._find_next(indexes, branch)
1232 1234
1233 1235 def _find_next(self, indexes, branch=None):
1234 1236 if branch and self.branch != branch:
1235 1237 raise VCSError('Branch option used on commit not belonging '
1236 1238 'to that branch')
1237 1239
1238 1240 for next_idx in indexes:
1239 1241 commit = self.repository.get_commit(commit_idx=next_idx)
1240 1242 if branch and branch != commit.branch:
1241 1243 continue
1242 1244 return commit
1243 1245 raise CommitDoesNotExistError
1244 1246
1245 1247 def diff(self, ignore_whitespace=True, context=3):
1246 1248 """
1247 1249 Returns a `Diff` object representing the change made by this commit.
1248 1250 """
1249 1251 parent = self.first_parent
1250 1252 diff = self.repository.get_diff(
1251 1253 parent, self,
1252 1254 ignore_whitespace=ignore_whitespace,
1253 1255 context=context)
1254 1256 return diff
1255 1257
1256 1258 @LazyProperty
1257 1259 def added(self):
1258 1260 """
1259 1261 Returns list of added ``FileNode`` objects.
1260 1262 """
1261 1263 raise NotImplementedError
1262 1264
1263 1265 @LazyProperty
1264 1266 def changed(self):
1265 1267 """
1266 1268 Returns list of modified ``FileNode`` objects.
1267 1269 """
1268 1270 raise NotImplementedError
1269 1271
1270 1272 @LazyProperty
1271 1273 def removed(self):
1272 1274 """
1273 1275 Returns list of removed ``FileNode`` objects.
1274 1276 """
1275 1277 raise NotImplementedError
1276 1278
1277 1279 @LazyProperty
1278 1280 def size(self):
1279 1281 """
1280 1282 Returns total number of bytes from contents of all filenodes.
1281 1283 """
1282 1284 return sum((node.size for node in self.get_filenodes_generator()))
1283 1285
1284 1286 def walk(self, topurl=''):
1285 1287 """
1286 1288 Similar to os.walk method. Insted of filesystem it walks through
1287 1289 commit starting at given ``topurl``. Returns generator of tuples
1288 1290 (topnode, dirnodes, filenodes).
1289 1291 """
1290 1292 topnode = self.get_node(topurl)
1291 1293 if not topnode.is_dir():
1292 1294 return
1293 1295 yield (topnode, topnode.dirs, topnode.files)
1294 1296 for dirnode in topnode.dirs:
1295 1297 for tup in self.walk(dirnode.path):
1296 1298 yield tup
1297 1299
1298 1300 def get_filenodes_generator(self):
1299 1301 """
1300 1302 Returns generator that yields *all* file nodes.
1301 1303 """
1302 1304 for topnode, dirs, files in self.walk():
1303 1305 for node in files:
1304 1306 yield node
1305 1307
1306 1308 #
1307 1309 # Utilities for sub classes to support consistent behavior
1308 1310 #
1309 1311
1310 1312 def no_node_at_path(self, path):
1311 1313 return NodeDoesNotExistError(
1312 1314 u"There is no file nor directory at the given path: "
1313 1315 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1314 1316
1315 1317 def _fix_path(self, path):
1316 1318 """
1317 1319 Paths are stored without trailing slash so we need to get rid off it if
1318 1320 needed.
1319 1321 """
1320 1322 return path.rstrip('/')
1321 1323
1322 1324 #
1323 1325 # Deprecated API based on changesets
1324 1326 #
1325 1327
1326 1328 @property
1327 1329 def revision(self):
1328 1330 warnings.warn("Use idx instead", DeprecationWarning)
1329 1331 return self.idx
1330 1332
1331 1333 @revision.setter
1332 1334 def revision(self, value):
1333 1335 warnings.warn("Use idx instead", DeprecationWarning)
1334 1336 self.idx = value
1335 1337
1336 1338 def get_file_changeset(self, path):
1337 1339 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1338 1340 return self.get_path_commit(path)
1339 1341
1340 1342
class BaseChangesetClass(type):
    """Metaclass so ``isinstance(x, BaseChangeset)`` accepts ``BaseCommit``."""

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1345 1347
1346 1348
class BaseChangeset(BaseCommit):
    """Deprecated alias for :class:`BaseCommit`; warns on instantiation."""

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1355 1357
1356 1358
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first, so the call is
        # all-or-nothing: nothing is appended when any duplicate exists.
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUGFIX: report a node that is actually missing. The previous
            # code reused the leftover loop variable `node`, which pointed at
            # the *last* iterated node even when that node was present.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1583 1585
1584 1586
class BaseInMemoryChangesetClass(type):
    """Metaclass so ``isinstance`` accepts ``BaseInMemoryCommit`` instances."""

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1589 1591
1590 1592
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """Deprecated alias for :class:`BaseInMemoryCommit`; warns on creation."""

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1599 1601
1600 1602
class EmptyCommit(BaseCommit):
    """
    Placeholder commit used when a commit object is required but the
    repository has none (or no matching one). A custom hash can be supplied
    via ``commit_id``.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Raw string identifier of this commit (the supplied placeholder hash),
        useful for web representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # resolve the default branch name of the configured backend, if any
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """Abbreviated (12 character) form of :attr:`raw_id`."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        """Alias for :attr:`raw_id`."""
        return self.raw_id

    def get_path_commit(self, path):
        # the empty commit is its own "last commit" for every path
        return self

    def get_file_content(self, path):
        # no tree: every file is empty
        return u''

    def get_file_content_streamed(self, path):
        yield self.get_file_content()

    def get_file_size(self, path):
        # no tree: every file has zero size
        return 0
1654 1656
1655 1657
class EmptyChangesetClass(type):
    """Metaclass so ``isinstance(x, EmptyChangeset)`` accepts ``EmptyCommit``."""

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1660 1662
1661 1663
class EmptyChangeset(EmptyCommit):
    """Deprecated alias for :class:`EmptyCommit`; warns on instantiation."""

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): super() is anchored at EmptyCommit (not EmptyChangeset),
        # skipping any EmptyCommit.__new__ override — kept as-is; confirm intent.
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated changeset-era alias for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1690 1692
1691 1693
class EmptyRepository(BaseRepository):
    """Null-object repository: skips initialization, yields an empty diff."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # imported locally to avoid a circular import at module load time
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1699 1701
1700 1702
class CollectionGenerator(object):
    """Lazy, sliceable sequence of commits built from a list of commit ids."""

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Hook allowing backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Return a new generator over the sliced id range (Python 2 slicing).
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, sliced_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1740 1742
1741 1743
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return a copy with every per-section dict duplicated."""
        duplicate = Config()
        for section_name, section_values in self._values.items():
            duplicate._values[section_name] = section_values.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate (option, value) pairs of ``section`` (empty if absent)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the stored value, or ``None`` when section/option is absent."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Store ``value`` under ``section``/``option``, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under ``section``."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name in self._values:
            for option, value in self._values[section_name].items():
                serialized.append(
                    (safe_str(section_name), safe_str(option), safe_str(value)))
        return serialized
1787 1789
1788 1790
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule

        NOTE: a former `_meta_re` match against the pre-first-chunk header
        was dead code (its result was never used) and has been removed.
        """
        diff_parts = ('\n' + self.raw).split('\ndiff --git')

        # diff_parts[0] is the header before the first chunk; not needed here
        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1821 1823
1822 1824
class DiffChunk(object):
    """One ``diff --git`` section of a :class:`Diff`: parsed header + body."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1837 1839
1838 1840
class BasePathPermissionChecker(object):
    """
    Interface for path-based permission checks; use
    :meth:`create_from_patterns` to pick the right concrete implementation.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Build the cheapest checker for the given include/exclude patterns:
        all-access, no-access, or full glob matching.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUGFIX: was `raise NotImplemented()` — NotImplemented is a
        # non-callable constant, so that line actually raised a confusing
        # TypeError. NotImplementedError is the correct exception.
        raise NotImplementedError()

    def has_access(self, path):
        # BUGFIX: same `NotImplemented()` -> NotImplementedError fix as above.
        raise NotImplementedError()
1856 1858
1857 1859
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path unconditionally."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1866 1868
1867 1869
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path unconditionally."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1876 1878
1877 1879
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Glob-pattern based path checker; exclude patterns win over includes."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # pre-compile the glob patterns once; empty/None pattern lists
        # compile to no regexes at all
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # an exclude match always denies, regardless of includes
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,1017 +1,1029 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
# Matches a full (40 hex chars) or abbreviated (12 hex chars) commit sha.
# FIX: was r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' — the stray square brackets
# turned the alternation into a character class plus an unanchored left
# branch, so any string merely *starting* with 12 hex characters matched.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Open (or create) the git repository at ``repo_path``.

        :param repo_path: filesystem path of the repository.
        :param config: optional Config object; defaults are used when None.
        :param create: create the repository on disk when True.
        :param src_url: optional URL to clone/fetch from after creation.
        :param do_workspace_checkout: also check out a working copy.
        :param with_wire: vcsserver wire options; caching is off by default.
        :param bare: treat/create the repository as bare.
        """

        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}
72 72
    @LazyProperty
    def _remote(self):
        # Lazily-created RPC proxy to the vcsserver for this repository;
        # repo_id is keyed on the filesystem path.
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
    @LazyProperty
    def bare(self):
        # True when the repository has no working copy (bare git repo),
        # as reported by the remote.
        return self._remote.bare()
81 81
    @LazyProperty
    def head(self):
        # Commit id the repository HEAD points to, as reported by the remote.
        return self._remote.head()
85 85
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order.  Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the commit-id -> index map in sync with the freshly loaded list
        self._rebuild_cache(commit_ids)
        return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed
        :param opts: env options to pass into Subprocess command
        :returns: ``(stdout, stderr)`` of the executed command
        :raises ValueError: when ``cmd`` is not a list
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        # callers may silence stderr logging (e.g. for expected failures)
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
116 116
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that git will issue a basic
        auth request that can cause the whole API to hang when used from
        python or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip scheme prefixes like 'git+' from e.g. 'git+http://...'
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create or validate the repository on disk.

        :param create: create a fresh repository at ``self.path``.
        :param src_url: optional source to clone/fetch after creation.
        :param do_workspace_checkout: check out a working copy after pull.
        :param bare: create a bare repository (incompatible with checkout).
        :raises RepositoryError: on conflicting options, invalid path, or
            underlying OS errors.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
189 189
    def _get_all_commit_ids(self):
        # Full ascending list of commit ids, delegated to the remote.
        return self._remote.get_all_commit_ids()
192 192
    def _get_commit_ids(self, filters=None):
        """
        Return commit ids (ascending) via ``git rev-list``.

        :param filters: optional dict with 'since', 'until' and
            'branch_name' keys narrowing the listed commits.
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # a branch restricts rev-list to that single ref
                rev_filter = []
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
230 230
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
        """
        Translate a commit reference (numeric index, short/full sha, or
        symbolic name like 'tip'/'HEAD') into a full commit id.

        :param translate_tag: kept for interface parity with other backends
            — presumably unused here; TODO confirm against callers.
        :param maybe_unreachable: also resolve objects not reachable from
            any ref (e.g. commits kept alive only by a shadow repository).
        :raises CommitDoesNotExistError: when the reference cannot be
            resolved.
        """
        def is_null(value):
            # True when ``value`` consists entirely of zeros (the null sha)
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        # short numeric strings (and ints / all-zero ids) are treated as
        # indices into the commit list, not shas
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
                or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

        elif is_bstr:
            # Need to call remote to translate id for tagging scenario
            try:
                remote_data = self._remote.get_object(commit_id_or_idx,
                                                      maybe_unreachable=maybe_unreachable)
                commit_id_or_idx = remote_data["commit_id"]
            except (CommitDoesNotExistError,):
                raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
261 263
262 264 def get_hook_location(self):
263 265 """
264 266 returns absolute path to location where hooks are stored
265 267 """
266 268 loc = os.path.join(self.path, 'hooks')
267 269 if not self.bare:
268 270 loc = os.path.join(self.path, '.git', 'hooks')
269 271 return loc
270 272
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty/broken repo: fall back to the filesystem mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282 284
283 285 def _get_fs_mtime(self):
284 286 idx_loc = '' if self.bare else '.git'
285 287 # fallback to filesystem
286 288 in_path = os.path.join(self.path, idx_loc, "index")
287 289 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 290 if os.path.exists(in_path):
289 291 return os.stat(in_path).st_mtime
290 292 else:
291 293 return os.stat(he_path).st_mtime
292 294
    @LazyProperty
    def description(self):
        # Repository description from the remote, with a default fallback.
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297 299
298 300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 301 if self.is_empty():
300 302 return OrderedDict()
301 303
302 304 result = []
303 305 for ref, sha in self._refs.iteritems():
304 306 if ref.startswith(prefix):
305 307 ref_name = ref
306 308 if strip_prefix:
307 309 ref_name = ref[len(prefix):]
308 310 result.append((safe_unicode(ref_name), sha))
309 311
310 312 def get_name(entry):
311 313 return entry[0]
312 314
313 315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314 316
    def _get_branches(self):
        # Branch name -> sha map built from refs/heads/*.
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317 319
    @CachedProperty
    def branches(self):
        # Cached branch name -> sha map.
        return self._get_branches()
321 323
    @CachedProperty
    def branches_closed(self):
        # Git has no concept of closed branches (Mercurial-only); always empty.
        return {}
325 327
    @CachedProperty
    def bookmarks(self):
        # Git has no bookmarks (Mercurial-only); always empty.
        return {}
329 331
    @CachedProperty
    def branches_all(self):
        # Union of open and closed branches (closed is always empty for git).
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
336 338
    @CachedProperty
    def tags(self):
        # Cached tag name -> sha map.
        return self._get_tags()
340 342
    def _get_tags(self):
        # Tag name -> sha map from refs/tags/*, newest-sorted (reverse order).
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343 345
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        :returns: the tagged commit
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

        # lightweight tag: just a ref pointing at the commit
        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        # refresh cached ref/tag maps so the new tag is visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit
369 371
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        # refresh cached ref/tag maps so the removal is visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
387 389
    def _get_refs(self):
        # Raw ref name -> sha map, delegated to the remote.
        return self._remote.get_refs()
390 392
    @CachedProperty
    def _refs(self):
        # Cached copy of the raw ref map; invalidated by ref mutations.
        return self._get_refs()
394 396
    @property
    def _ref_tree(self):
        """Return refs as a nested dict tree keyed by '/'-split ref segments."""
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            # descend/create intermediate nodes for every path component
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            # restart from the root for the next ref
            node = tree
        return tree
405 407
406 408 def get_remote_ref(self, ref_name):
407 409 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 410 try:
409 411 return self._refs[ref_key]
410 412 except Exception:
411 413 return
412 414
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=True, maybe_unreachable=False):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: sha or symbolic name of the commit.
        :param commit_idx: numeric index into the commit list.
        :param pre_load: attributes to eagerly load on the commit.
        :param translate_tag: resolve symbolic names/tags via the remote.
        :param maybe_unreachable: also resolve commits not reachable from
            any ref (e.g. kept alive only by a shadow repository).
        :raises EmptyRepositoryError: when the repository has no commits.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indices to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            # commit is known to the remote but not in our cached list
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
451 454
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # resolve ids to positions in the full commit list
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive for slicing
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
531 534
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: limit the diff to this path (same as ``path1``).
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; diffing two different
          paths is not supported.
        :raises ValueError: when ``path1`` differs from ``path``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)
        return GitDiff(diff)
562 565
    def strip(self, commit_id, branch_name):
        """
        Reset ``branch_name`` to the parent of ``commit_id``, dropping the
        commit from the branch; returns the new total commit count.
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)
578 581
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the merge-base commit id of ``commit_id1`` and ``commit_id2``,
        possibly across two repositories; None when no ancestor exists.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repo: derive the ancestor from the missing-revs boundary
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id
601 604
602 605 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
603 606 repo1 = self
604 607 ancestor_id = None
605 608
606 609 if commit_id1 == commit_id2:
607 610 commits = []
608 611 elif repo1 != repo2:
609 612 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
610 613 repo2.path)
611 614 commits = [
612 615 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
613 616 for commit_id in reversed(missing_ids)]
614 617 else:
615 618 output, __ = repo1.run_git_command(
616 619 ['log', '--reverse', '--pretty=format: %H', '-s',
617 620 '%s..%s' % (commit_id1, commit_id2)])
618 621 commits = [
619 622 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
620 623 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
621 624
622 625 return commits
623 626
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
630 633
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from external location. Pull is different in GIT
        that fetch since it's doing a checkout

        :param url: source location to pull from.
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        :param update_after: update the working copy after pulling.
        """
        refs = None
        if commit_ids is not None:
            # only pull the refs that resolve to the requested commit ids
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        self._remote.invalidate_vcs_cache()
645 648
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.

        :param commit_ids: optional refs/commits to restrict the fetch to.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()
652 655
    def push(self, url):
        # Push all refs to the external location.
        refs = None
        self._remote.sync_push(url, refs=refs)
656 659
    def set_refs(self, ref_name, commit_id):
        """Point ``ref_name`` at ``commit_id`` and drop the cached ref map."""
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
660 663
    def remove_ref(self, ref_name):
        """Delete ``ref_name`` and drop the cached ref map."""
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
664 667
    def run_gc(self, prune=True):
        """
        Run ``git gc --aggressive`` on the repository.

        :param prune: also prune loose unreachable objects immediately
            (``--prune=now``).
        :returns: stderr output of the gc command.
        """
        cmd = ['gc', '--aggressive']
        if prune:
            cmd += ['--prune=now']
        _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
        return stderr
674
    def _update_server_info(self):
        """
        runs git's update-server-info command in this repo instance
        """
        self._remote.update_server_info()
670 680
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: when called on a bare repository.
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
686 696
    def _checkout(self, branch_name, create=False, force=False):
        """
        Checkout a branch in the working directory.

        It tries to create the branch if create is True, failing if the branch
        already exists.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :param create: create the branch (``-b``) before checking it out.
        :param force: discard local changes (``-f``).
        :raises RepositoryError: when called on a bare repository.
        """
        if self.bare:
            raise RepositoryError('Cannot checkout branches in a bare git repo')

        cmd = ['checkout']
        if force:
            cmd.append('-f')
        if create:
            cmd.append('-b')
        cmd.append(branch_name)
        self.run_git_command(cmd, fail_on_stderr=False)
707 717
    def _create_branch(self, branch_name, commit_id):
        """
        creates a branch in a GIT repo
        """
        self._remote.create_branch(branch_name, commit_id)
713 723
    def _identify(self):
        """
        Return the current state of the working directory (the sha HEAD
        resolves to), or None for an empty repository.

        :raises RepositoryError: when called on a bare repository.
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
726 736
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param clone_path: destination directory of the clone.
        :param branch_name: branch to clone (shallow clones fetch only this).
        :param source_branch: optional extra branch to fetch for merges.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
746 756
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.

        :param use_origin: force-update the local ``refs/heads`` ref with the
            fetched branch (``+<branch>:refs/heads/<branch>``).
        :raises ValueError: when fetching from the repository itself.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
762 772
763 773 def _local_reset(self, branch_name):
764 774 branch_name = '{}'.format(branch_name)
765 775 cmd = ['reset', '--hard', branch_name, '--']
766 776 self.run_git_command(cmd, fail_on_stderr=False)
767 777
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if ' not-for-merge ' in line:
                    continue
                # keep only the sha: strip everything from the first tab on
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads
789 799
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        # Open the merge-workspace shadow repo; caching off unless requested.
        # NOTE(review): enable_hooks is accepted but not used here — confirm
        # whether the shadow repo is intended to run hooks.
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792 802
    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.

        :param ff_only: restrict the pull to fast-forward updates.
        :raises RepositoryError: when called on a bare repository.
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)
809 819
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param heads: the heads to merge.
        :raises RepositoryError: for bare/empty repos or merge failures.
        :raises UnresolvedFilesInRepo: when the merge fails with conflicts;
            carries the list of conflicted files ('U <path>' entries).
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
               '-c', 'user.email=%s' % safe_str(user_email),
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            # collect conflicted (unmerged) files before aborting the merge
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistent with HG backend output
            unresolved = ['U {}'.format(f) for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                raise
855 867
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.

        :param enable_hooks: run RhodeCode hooks in the target repo when True.
        :param rc_scm_data: serialized scm data passed via RC_SCM_DATA env.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
887 899
888 900 def _get_new_pr_branch(self, source_branch, target_branch):
889 901 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
890 902 pr_branches = []
891 903 for branch in self.branches:
892 904 if branch.startswith(prefix):
893 905 pr_branches.append(int(branch[len(prefix):]))
894 906
895 907 if not pr_branches:
896 908 branch_id = 0
897 909 else:
898 910 branch_id = max(pr_branches) + 1
899 911
900 912 return '%s%d' % (prefix, branch_id)
901 913
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Ensure a shadow repository exists for this (repo_id, workspace_id)
        pair and return its filesystem path.

        The shadow repo is created lazily via a local clone on first use;
        subsequent calls just return the existing path.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path
913 925
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Test (and optionally perform) a merge of ``source_ref`` from
        ``source_repo`` into ``target_ref`` using a shadow repository, and
        return a ``MergeResponse`` describing the outcome.

        :param dry_run: when True the merge is only tested inside the shadow
            repo; nothing is pushed back to the target repository.
        :param use_rebase: only affects the log message here; this git backend
            always merges via ``_local_merge``.
        :param close_branch: accepted for interface parity with other
            backends; not used in this method.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # Refuse to merge when the caller's idea of the target head is stale.
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError as e:
            log.exception('Failure when doing local merge on git shadow repo')
            # surface per-file conflict info to the caller via metadata
            if isinstance(e, UnresolvedFilesInRepo):
                metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,978 +1,979 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit_id -> index mapping, filled lazily by _rebuild_cache
        self._commit_ids = {}
88 88
    @LazyProperty
    def _remote(self):
        # Lazily-created RPC proxy to the vcsserver for this repository path.
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the reverse lookup (id -> index) in sync with the list
        self._rebuild_cache(commit_ids)
        return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
    @CachedProperty
    def branches(self):
        # active, not-closed branches only (see _get_branches defaults)
        return self._get_branches()
111 111
    @CachedProperty
    def branches_closed(self):
        # closed branches only
        return self._get_branches(active=False, closed=True)
115 115
    @CachedProperty
    def branches_all(self):
        # union of active and closed branches; closed entries win on clash
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
122 122
    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository
        Returns only not closed active branches by default

        :param active: return also active branches
        :param closed: return also closed branches

        :return: OrderedDict of branch name -> hex commit id, sorted by name
        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the branch name from the (name, hex) pair
            return ctx[0]

        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()
149 149
    def _get_tags(self):
        # OrderedDict of tag name -> hex commit id, sorted by name descending
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the tag name from the (name, hex) pair
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags so the new tag is visible on this instance
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # in mercurial, removal is tagging the nullid under the same name
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()
224 224
    def _get_bookmarks(self):
        # OrderedDict of bookmark name -> hex commit id, sorted by name
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the bookmark name from the (name, hex) pair
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
    def _get_all_commit_ids(self):
        # only 'visible' commits: obsolete/hidden changesets are excluded
        return self._remote.get_all_commit_ids('visible')
240 240
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :raises ValueError: if ``path1`` is given and differs from ``path`` —
          diffing two different paths is not supported by this backend.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
271 271
    def strip(self, commit_id, branch=None):
        """Strip the given commit (no backup), then return the new commit count."""
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache so commit_ids reflects the stripped history
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
    def hg_update_cache(self):
        """Trigger `hg debugupdatecache`-style cache refresh via the remote."""
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """Return the common-ancestor commit id of the two commits, or None."""
        # trivial case: identical commits are their own ancestor
        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)
        return repo2[ancestors[0]].raw_id if ancestors else None
307 307
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of *repo2* that are in ``commit_id2`` but not in
        ``commit_id1``; with ``merge`` the full ancestor difference is used
        instead of the linear ``id(..)..id(..)`` range.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
325 325
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
343 343
344 344 @staticmethod
345 345 def is_valid_repository(path):
346 346 return os.path.isdir(os.path.join(path, '.hg'))
347 347
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
376 376
    @LazyProperty
    def in_memory_commit(self):
        # builder object for composing a commit without a working copy
        return MercurialInMemoryCommit(self)
380 380
    @LazyProperty
    def description(self):
        # repo description from [web] config section, falling back to default
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386 386
    @LazyProperty
    def contact(self):
        # [web] contact wins over [ui] username; default if neither is set
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
393 393
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty/broken repo: fall back to filesystem mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405 405
406 406 def _get_fs_mtime(self):
407 407 # fallback to filesystem
408 408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 409 st_path = os.path.join(self.path, '.hg', "store")
410 410 if os.path.exists(cl_path):
411 411 return os.stat(cl_path).st_mtime
412 412 else:
413 413 return os.stat(st_path).st_mtime
414 414
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE(review): Python 2 API — urllib.pathname2url moved to
        # urllib.request in Python 3.
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
425 425
426 426 def get_hook_location(self):
427 427 """
428 428 returns absolute path to location where hooks are stored
429 429 """
430 430 return os.path.join(self.path, '.hg', '.hgrc')
431 431
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param maybe_unreachable: accepted for interface compatibility with
            other backends; not used in this method's body.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            # neither id nor idx given: resolve the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
473 474
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
          exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end inclusive for list slicing below
            end_pos += 1

        # build a revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
556 557
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
567 568
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)
573 574
    def push(self, url):
        """Push all changes to the given external location."""
        url = self._get_url(url)
        self._remote.sync_push(url)
577 578
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks=False: the clone is an internal workspace, not a user repo
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
584 585
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes (like ``hg update --clean``).
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
591 592
592 593 def _identify(self):
593 594 """
594 595 Return the current state of the working directory.
595 596 """
596 597 return self._remote.identify().strip().rstrip('+')
597 598
598 599 def _heads(self, branch=None):
599 600 """
600 601 Return the commit ids of the repository heads.
601 602 """
602 603 return self._remote.heads(branch=branch).strip().split(' ')
603 604
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
609 610
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: when False the target repo's hooks do not run.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
621 622
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param dry_run: accepted for interface compatibility; not used in
            this method's body.
        :raises UnresolvedFilesInRepo: when the merge/rebase fails with
            conflicts; args[0] carries the unresolved file list.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
694 695
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: unused here, kept for signature symmetry with the
            other ``_local_*`` helpers.
        :param user_name: author name recorded on the close commit.
        :param user_email: author email recorded on the close commit.
        :param source_ref: reference whose branch gets closed; the working
            directory is updated to its commit before committing.
        :param close_message: optional commit message; a default message
            naming the branch is used when empty.
        :raises RepositoryError: re-raised after cleaning the working dir.
        """
        # Check out the head of the branch we are about to close; the close
        # commit is appended on top of it.
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            # The close commit exists only in this (shadow) repo so far,
            # therefore it always needs to be pushed -> return True.
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
716 717
717 718 def _is_the_same_branch(self, target_ref, source_ref):
718 719 return (
719 720 self._get_branch_name(target_ref) ==
720 721 self._get_branch_name(source_ref))
721 722
722 723 def _get_branch_name(self, ref):
723 724 if ref.type == 'branch':
724 725 return ref.name
725 726 return self._remote.ctx_branch(ref.commit_id)
726 727
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Ensure the shadow repository used for merging exists on disk.

        Clones this repository into the per-workspace shadow location on the
        first call; subsequent calls are no-ops.

        :param repo_id: repository id used to compute the shadow path.
        :param workspace_id: workspace id used to compute the shadow path.
        :param unused_target_ref: unused, kept for backend API symmetry.
        :param unused_source_ref: unused, kept for backend API symmetry.
        :return: filesystem path of the shadow repository.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
737 738
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository, performing the work inside a shadow repository.

        Flow: validate the target head(s) -> pull target and source refs into
        the shadow repo -> optionally close the source branch -> run the
        local merge/rebase -> on success (and not ``dry_run``) push the
        result back into this repository.

        :param dry_run: perform the merge in the shadow repo but do not push.
        :param use_rebase: rebase instead of merge; incompatible with
            ``close_branch`` (close is skipped when rebasing).
        :param close_branch: also close the source branch (branch refs only).
        :return: a ``MergeResponse`` describing possibility, success, the
            resulting reference and a failure reason plus metadata.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # The target commit must be a current head, otherwise the push at the
        # end would create new heads / rewrite history.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial branches may have multiple heads; merging into an
            # ambiguous target is refused.
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                # Unresolved conflicts are reported back in the metadata so
                # the UI can show the conflicting files.
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
888 889
889 890 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
890 891 config = self.config.copy()
891 892 if not enable_hooks:
892 893 config.clear_section('hooks')
893 894 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
894 895
895 896 def _validate_pull_reference(self, reference):
896 897 if not (reference.name in self.bookmarks or
897 898 reference.name in self.branches or
898 899 self.get_commit(reference.commit_id)):
899 900 raise CommitDoesNotExistError(
900 901 'Unknown branch, bookmark or commit id')
901 902
902 903 def _local_pull(self, repository_path, reference):
903 904 """
904 905 Fetch a branch, bookmark or commit from a local repository.
905 906 """
906 907 repository_path = os.path.abspath(repository_path)
907 908 if repository_path == self.path:
908 909 raise ValueError('Cannot pull from the same repository')
909 910
910 911 reference_type_to_option_name = {
911 912 'book': 'bookmark',
912 913 'branch': 'branch',
913 914 }
914 915 option_name = reference_type_to_option_name.get(
915 916 reference.type, 'revision')
916 917
917 918 if option_name == 'revision':
918 919 ref = reference.commit_id
919 920 else:
920 921 ref = reference.name
921 922
922 923 options = {option_name: [ref]}
923 924 self._remote.pull_cmd(repository_path, hooks=False, **options)
924 925 self._remote.invalidate_vcs_cache()
925 926
926 927 def bookmark(self, bookmark, revision=None):
927 928 if isinstance(bookmark, unicode):
928 929 bookmark = safe_str(bookmark)
929 930 self._remote.bookmark(bookmark, revision=revision)
930 931 self._remote.invalidate_vcs_cache()
931 932
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from the repository's
        ``.hg/hgacl`` file.

        :return: a checker created from the include/exclude patterns, or
            ``None`` when no hgacl file exists or no pattern applies.
        :raises exceptions.RepositoryRequirementError: when the hgacl file
            exists but cannot be parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Look up the first matching option; user-specific entries take
            # precedence over 'default', and section [narrowacl] over
            # [narrowhgacl].
            svalue = None
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # A literal directory pattern implicitly covers its subtree.
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # Broad catch is deliberate: any parsing problem must surface
                # as a repository requirement error, not crash the request.
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
972 973
973 974
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    # Collection generator whose items are numeric commit indices rather
    # than commit ids, as used by the Mercurial backend.

    def _commit_factory(self, commit_id):
        """Resolve the numeric index to a commit, applying ``pre_load``."""
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,369 +1,370 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.utils2 import CachedProperty
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.backends.svn.commit import (
38 38 SubversionCommit, _date_from_svn_properties)
39 39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 44 VCSError, NodeDoesNotExistError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
class SubversionRepository(base.BaseRepository):
    """
    Subversion backend implementation

    .. important::

        It is very important to distinguish the commit index and the commit id
        which is assigned by Subversion. The first one is always handled as an
        `int` by this implementation. The commit id assigned by Subversion on
        the other side will always be a `str`.

        There is a specific trap since the first commit will have the index
        ``0`` but the svn id will be ``"1"``.

    """

    # Note: Subversion does not really have a default branch name.
    DEFAULT_BRANCH_NAME = None

    contact = base.BaseRepository.DEFAULT_CONTACT
    description = base.BaseRepository.DEFAULT_DESCRIPTION

    def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
                 bare=False, **kwargs):
        """
        :param repo_path: filesystem path of the repository.
        :param config: vcs configuration; the default config is used if None.
        :param create: create a new repository at ``repo_path``.
        :param src_url: optional URL to import repository content from.
        :param with_wire: remote connection options, e.g. ``{"cache": True}``.
        :param bare: accepted for API symmetry with other backends; unused.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        # Lazy, so constructing a repository object does not immediately
        # open a connection to the vcsserver.
        repo_id = self.path
        return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)

    def _init_repo(self, create, src_url):
        """Create a fresh repository or validate the one on disk."""
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if create:
            self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
            if src_url:
                src_url = _sanitize_url(src_url)
                self._remote.import_remote_repository(src_url)
        else:
            self._check_path()

    @CachedProperty
    def commit_ids(self):
        # Subversion revisions are dense integers starting at 1, so the list
        # of ids can be generated directly from HEAD.
        head = self._remote.lookup(None)
        return [str(r) for r in xrange(1, head + 1)]

    def _rebuild_cache(self, commit_ids):
        # Nothing to rebuild: ids are derived directly from HEAD above.
        pass

    def run_svn_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as svn command and returns tuple
        (stdout, stderr).

        :param cmd: full svn command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_svn_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
        return out, err

    @LazyProperty
    def branches(self):
        return self._tags_or_branches('vcs_svn_branch')

    @LazyProperty
    def branches_closed(self):
        # Subversion has no concept of closed branches.
        return {}

    @LazyProperty
    def bookmarks(self):
        # Subversion has no concept of bookmarks.
        return {}

    @LazyProperty
    def branches_all(self):
        # TODO: johbo: Implement proper branch support
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        return self._tags_or_branches('vcs_svn_tag')

    def _tags_or_branches(self, config_section):
        """
        Resolve the path patterns configured in *config_section* against the
        tip commit and return an OrderedDict of name -> last commit id,
        sorted by name.
        """
        found_items = {}

        if self.is_empty():
            return {}

        for pattern in self._patterns_from_section(config_section):
            pattern = vcspath.sanitize(pattern)
            tip = self.get_commit()
            try:
                if pattern.endswith('*'):
                    # Wildcard: every directory below the pattern's base dir.
                    basedir = tip.get_node(vcspath.dirname(pattern))
                    directories = basedir.dirs
                else:
                    directories = (tip.get_node(pattern), )
            except NodeDoesNotExistError:
                continue
            found_items.update(
                (safe_unicode(n.path),
                 self.commit_ids[-1])
                for n in directories)

        def get_name(item):
            return item[0]

        return OrderedDict(sorted(found_items.items(), key=get_name))

    def _patterns_from_section(self, section):
        return (pattern for key, pattern in self.config.items(section))

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        if self != repo2:
            raise ValueError(
                "Subversion does not support getting common ancestor of"
                " different repositories.")

        # History is linear, so the common ancestor is simply the older rev.
        if int(commit_id1) < int(commit_id2):
            return commit_id1
        return commit_id2

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        # TODO: johbo: Implement better comparison, this is a very naive
        # version which does not allow to compare branches, tags or folders
        # at all.
        if repo2 != self:
            raise ValueError(
                "Subversion does not support comparison of different "
                "repositories.")

        if commit_id1 == commit_id2:
            return []

        commit_idx1 = self._get_commit_idx(commit_id1)
        commit_idx2 = self._get_commit_idx(commit_id2)

        commits = [
            self.get_commit(commit_idx=idx)
            for idx in range(commit_idx1 + 1, commit_idx2 + 1)]

        return commits

    def _get_commit_idx(self, commit_id):
        """Translate a commit id (or symbolic name) into the 0-based index."""
        try:
            svn_rev = int(commit_id)
        except (TypeError, ValueError):
            # Only conversion failures fall back to a remote lookup; a bare
            # ``except:`` here previously swallowed unrelated errors.
            # TODO: johbo: this might be only one case, HEAD, check this
            svn_rev = self._remote.lookup(commit_id)
        commit_idx = svn_rev - 1
        if commit_idx >= len(self.commit_ids):
            raise CommitDoesNotExistError(
                "Commit at index %s does not exist." % (commit_idx, ))
        return commit_idx

    @staticmethod
    def check_url(url, config):
        """
        Check if `url` is a valid source to import a Subversion repository.
        """
        # convert to URL if it's a local directory
        if os.path.isdir(url):
            url = 'file://' + urllib.pathname2url(url)
        return connection.Svn.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        """Return True when *path* holds a readable Subversion repository."""
        try:
            SubversionRepository(path)
            return True
        except VCSError:
            pass
        return False

    def _check_path(self):
        """Raise VCSError unless ``self.path`` is an svn repository."""
        if not os.path.exists(self.path):
            raise VCSError('Path "%s" does not exist!' % (self.path, ))
        if not self._remote.is_path_valid_repository(self.path):
            raise VCSError(
                'Path "%s" does not contain a Subversion repository' %
                (self.path, ))

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        return _date_from_svn_properties(properties)

    @LazyProperty
    def in_memory_commit(self):
        return SubversionInMemoryCommit(self)

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, 'hooks')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Return a single commit addressed by svn id (``commit_id``) or by
        0-based index (``commit_idx``).  ``pre_load``, ``translate_tag`` and
        ``maybe_unreachable`` are accepted for backend API compatibility.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))

        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Return a lazy collection of commits filtered by id range, date range
        and/or path ("branch") history.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))
        if end_pos is not None:
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            # "branch" filtering == history of the path, newest last.
            svn_rev = long(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)

    def _sanitize_commit_id(self, commit_id):
        """Normalize symbolic ids (None/HEAD/tip) to a concrete revision str."""
        if commit_id and commit_id.isdigit():
            if int(commit_id) <= len(self.commit_ids):
                return commit_id
            else:
                raise CommitDoesNotExistError(
                    "Commit %s does not exist." % (commit_id, ))
        if commit_id not in [
                None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
            raise CommitDoesNotExistError(
                "Commit id %s not understood." % (commit_id, ))
        svn_rev = self._remote.lookup('HEAD')
        return str(svn_rev)

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """Return the diff between two commits, optionally limited to a path."""
        self._validate_diff_commits(commit1, commit2)
        svn_rev1 = long(commit1.raw_id)
        svn_rev2 = long(commit2.raw_id)
        diff = self._remote.diff(
            svn_rev1, svn_rev2, path1=path1, path2=path,
            ignore_whitespace=ignore_whitespace, context=context)
        return SubversionDiff(diff)
364 365
365 366
366 367 def _sanitize_url(url):
367 368 if '://' not in url:
368 369 url = 'file://' + urllib.pathname2url(url)
369 370 return url
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now