pull-request: fixed typo in wrong ref type error, and added which...
marcink -
r1687:d43a5d8f default
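
The diff below renames UpdateFailureReason.WRONG_REF_TPYE to UpdateFailureReason.WRONG_REF_TYPE in the vcs base module and updates the controller's warning_reasons list in _update_commits() to match. A minimal sketch of that flash-category mapping follows; the enumeration values are copied from the backend module, while the message table is a placeholder standing in for PullRequestModel.UPDATE_STATUS_MESSAGES, whose real contents are not part of this diff.

class UpdateFailureReason(object):
    # values mirror rhodecode.lib.vcs.backends.base; numbers must stay stable
    NONE = 0
    UNKNOWN = 1
    NO_CHANGE = 2
    WRONG_REF_TYPE = 3
    MISSING_TARGET_REF = 4
    MISSING_SOURCE_REF = 5

# placeholder texts only; the real messages live in PullRequestModel
UPDATE_STATUS_MESSAGES = {
    UpdateFailureReason.NO_CHANGE: 'Pull request is already up to date.',
    UpdateFailureReason.WRONG_REF_TYPE: 'Reference type is not supported.',
    UpdateFailureReason.MISSING_TARGET_REF: 'Target reference is missing.',
}

def flash_category(reason):
    # NO_CHANGE and WRONG_REF_TYPE are user-correctable conditions, so the
    # controller flashes them as warnings; anything else is an error
    warning_reasons = [
        UpdateFailureReason.NO_CHANGE,
        UpdateFailureReason.WRONG_REF_TYPE,
    ]
    return 'warning' if reason in warning_reasons else 'error'

print(flash_category(UpdateFailureReason.WRONG_REF_TYPE))  # -> warning
print(flash_category(UpdateFailureReason.UNKNOWN))         # -> error
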
@@ -1,1095 +1,1095 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 pull requests controller for rhodecode for initializing pull requests
23 23 """
24 24 import types
25 25
26 26 import peppercorn
27 27 import formencode
28 28 import logging
29 29 import collections
30 30
31 31 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
32 32 from pylons import request, tmpl_context as c, url
33 33 from pylons.controllers.util import redirect
34 34 from pylons.i18n.translation import _
35 35 from pyramid.threadlocal import get_current_registry
36 36 from sqlalchemy.sql import func
37 37 from sqlalchemy.sql.expression import or_
38 38
39 39 from rhodecode import events
40 40 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
41 41 from rhodecode.lib.ext_json import json
42 42 from rhodecode.lib.base import (
43 43 BaseRepoController, render, vcs_operation_context)
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
46 46 HasAcceptedRepoType, XHRRequired)
47 47 from rhodecode.lib.channelstream import channelstream_request
48 48 from rhodecode.lib.utils import jsonify
49 49 from rhodecode.lib.utils2 import (
50 50 safe_int, safe_str, str2bool, safe_unicode)
51 51 from rhodecode.lib.vcs.backends.base import (
52 52 EmptyCommit, UpdateFailureReason, EmptyRepository)
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
55 55 NodeDoesNotExistError)
56 56
57 57 from rhodecode.model.changeset_status import ChangesetStatusModel
58 58 from rhodecode.model.comment import CommentsModel
59 59 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
60 60 Repository, PullRequestVersion)
61 61 from rhodecode.model.forms import PullRequestForm
62 62 from rhodecode.model.meta import Session
63 63 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
64 64
65 65 log = logging.getLogger(__name__)
66 66
67 67
68 68 class PullrequestsController(BaseRepoController):
69 69
70 70 def __before__(self):
71 71 super(PullrequestsController, self).__before__()
72 72 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
73 73 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
74 74
75 75 def _extract_ordering(self, request):
76 76 column_index = safe_int(request.GET.get('order[0][column]'))
77 77 order_dir = request.GET.get('order[0][dir]', 'desc')
78 78 order_by = request.GET.get(
79 79 'columns[%s][data][sort]' % column_index, 'name_raw')
80 80 return order_by, order_dir
81 81
82 82 @LoginRequired()
83 83 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
84 84 'repository.admin')
85 85 @HasAcceptedRepoType('git', 'hg')
86 86 def show_all(self, repo_name):
87 87 # filter types
88 88 c.active = 'open'
89 89 c.source = str2bool(request.GET.get('source'))
90 90 c.closed = str2bool(request.GET.get('closed'))
91 91 c.my = str2bool(request.GET.get('my'))
92 92 c.awaiting_review = str2bool(request.GET.get('awaiting_review'))
93 93 c.awaiting_my_review = str2bool(request.GET.get('awaiting_my_review'))
94 94 c.repo_name = repo_name
95 95
96 96 opened_by = None
97 97 if c.my:
98 98 c.active = 'my'
99 99 opened_by = [c.rhodecode_user.user_id]
100 100
101 101 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
102 102 if c.closed:
103 103 c.active = 'closed'
104 104 statuses = [PullRequest.STATUS_CLOSED]
105 105
106 106 if c.awaiting_review and not c.source:
107 107 c.active = 'awaiting'
108 108 if c.source and not c.awaiting_review:
109 109 c.active = 'source'
110 110 if c.awaiting_my_review:
111 111 c.active = 'awaiting_my'
112 112
113 113 data = self._get_pull_requests_list(
114 114 repo_name=repo_name, opened_by=opened_by, statuses=statuses)
115 115 if not request.is_xhr:
116 116 c.data = json.dumps(data['data'])
117 117 c.records_total = data['recordsTotal']
118 118 return render('/pullrequests/pullrequests.mako')
119 119 else:
120 120 return json.dumps(data)
121 121
122 122 def _get_pull_requests_list(self, repo_name, opened_by, statuses):
123 123 # pagination
124 124 start = safe_int(request.GET.get('start'), 0)
125 125 length = safe_int(request.GET.get('length'), c.visual.dashboard_items)
126 126 order_by, order_dir = self._extract_ordering(request)
127 127
128 128 if c.awaiting_review:
129 129 pull_requests = PullRequestModel().get_awaiting_review(
130 130 repo_name, source=c.source, opened_by=opened_by,
131 131 statuses=statuses, offset=start, length=length,
132 132 order_by=order_by, order_dir=order_dir)
133 133 pull_requests_total_count = PullRequestModel(
134 134 ).count_awaiting_review(
135 135 repo_name, source=c.source, statuses=statuses,
136 136 opened_by=opened_by)
137 137 elif c.awaiting_my_review:
138 138 pull_requests = PullRequestModel().get_awaiting_my_review(
139 139 repo_name, source=c.source, opened_by=opened_by,
140 140 user_id=c.rhodecode_user.user_id, statuses=statuses,
141 141 offset=start, length=length, order_by=order_by,
142 142 order_dir=order_dir)
143 143 pull_requests_total_count = PullRequestModel(
144 144 ).count_awaiting_my_review(
145 145 repo_name, source=c.source, user_id=c.rhodecode_user.user_id,
146 146 statuses=statuses, opened_by=opened_by)
147 147 else:
148 148 pull_requests = PullRequestModel().get_all(
149 149 repo_name, source=c.source, opened_by=opened_by,
150 150 statuses=statuses, offset=start, length=length,
151 151 order_by=order_by, order_dir=order_dir)
152 152 pull_requests_total_count = PullRequestModel().count_all(
153 153 repo_name, source=c.source, statuses=statuses,
154 154 opened_by=opened_by)
155 155
156 156 from rhodecode.lib.utils import PartialRenderer
157 157 _render = PartialRenderer('data_table/_dt_elements.mako')
158 158 data = []
159 159 for pr in pull_requests:
160 160 comments = CommentsModel().get_all_comments(
161 161 c.rhodecode_db_repo.repo_id, pull_request=pr)
162 162
163 163 data.append({
164 164 'name': _render('pullrequest_name',
165 165 pr.pull_request_id, pr.target_repo.repo_name),
166 166 'name_raw': pr.pull_request_id,
167 167 'status': _render('pullrequest_status',
168 168 pr.calculated_review_status()),
169 169 'title': _render(
170 170 'pullrequest_title', pr.title, pr.description),
171 171 'description': h.escape(pr.description),
172 172 'updated_on': _render('pullrequest_updated_on',
173 173 h.datetime_to_time(pr.updated_on)),
174 174 'updated_on_raw': h.datetime_to_time(pr.updated_on),
175 175 'created_on': _render('pullrequest_updated_on',
176 176 h.datetime_to_time(pr.created_on)),
177 177 'created_on_raw': h.datetime_to_time(pr.created_on),
178 178 'author': _render('pullrequest_author',
179 179 pr.author.full_contact, ),
180 180 'author_raw': pr.author.full_name,
181 181 'comments': _render('pullrequest_comments', len(comments)),
182 182 'comments_raw': len(comments),
183 183 'closed': pr.is_closed(),
184 184 })
185 185 # json used to render the grid
186 186 data = ({
187 187 'data': data,
188 188 'recordsTotal': pull_requests_total_count,
189 189 'recordsFiltered': pull_requests_total_count,
190 190 })
191 191 return data
192 192
193 193 @LoginRequired()
194 194 @NotAnonymous()
195 195 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
196 196 'repository.admin')
197 197 @HasAcceptedRepoType('git', 'hg')
198 198 def index(self):
199 199 source_repo = c.rhodecode_db_repo
200 200
201 201 try:
202 202 source_repo.scm_instance().get_commit()
203 203 except EmptyRepositoryError:
204 204 h.flash(h.literal(_('There are no commits yet')),
205 205 category='warning')
206 206 redirect(url('summary_home', repo_name=source_repo.repo_name))
207 207
208 208 commit_id = request.GET.get('commit')
209 209 branch_ref = request.GET.get('branch')
210 210 bookmark_ref = request.GET.get('bookmark')
211 211
212 212 try:
213 213 source_repo_data = PullRequestModel().generate_repo_data(
214 214 source_repo, commit_id=commit_id,
215 215 branch=branch_ref, bookmark=bookmark_ref)
216 216 except CommitDoesNotExistError as e:
217 217 log.exception(e)
218 218 h.flash(_('Commit does not exist'), 'error')
219 219 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
220 220
221 221 default_target_repo = source_repo
222 222
223 223 if source_repo.parent:
224 224 parent_vcs_obj = source_repo.parent.scm_instance()
225 225 if parent_vcs_obj and not parent_vcs_obj.is_empty():
226 226 # change default if we have a parent repo
227 227 default_target_repo = source_repo.parent
228 228
229 229 target_repo_data = PullRequestModel().generate_repo_data(
230 230 default_target_repo)
231 231
232 232 selected_source_ref = source_repo_data['refs']['selected_ref']
233 233
234 234 title_source_ref = selected_source_ref.split(':', 2)[1]
235 235 c.default_title = PullRequestModel().generate_pullrequest_title(
236 236 source=source_repo.repo_name,
237 237 source_ref=title_source_ref,
238 238 target=default_target_repo.repo_name
239 239 )
240 240
241 241 c.default_repo_data = {
242 242 'source_repo_name': source_repo.repo_name,
243 243 'source_refs_json': json.dumps(source_repo_data),
244 244 'target_repo_name': default_target_repo.repo_name,
245 245 'target_refs_json': json.dumps(target_repo_data),
246 246 }
247 247 c.default_source_ref = selected_source_ref
248 248
249 249 return render('/pullrequests/pullrequest.mako')
250 250
251 251 @LoginRequired()
252 252 @NotAnonymous()
253 253 @XHRRequired()
254 254 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
255 255 'repository.admin')
256 256 @jsonify
257 257 def get_repo_refs(self, repo_name, target_repo_name):
258 258 repo = Repository.get_by_repo_name(target_repo_name)
259 259 if not repo:
260 260 raise HTTPNotFound
261 261 return PullRequestModel().generate_repo_data(repo)
262 262
263 263 @LoginRequired()
264 264 @NotAnonymous()
265 265 @XHRRequired()
266 266 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
267 267 'repository.admin')
268 268 @jsonify
269 269 def get_repo_destinations(self, repo_name):
270 270 repo = Repository.get_by_repo_name(repo_name)
271 271 if not repo:
272 272 raise HTTPNotFound
273 273 filter_query = request.GET.get('query')
274 274
275 275 query = Repository.query() \
276 276 .order_by(func.length(Repository.repo_name)) \
277 277 .filter(or_(
278 278 Repository.repo_name == repo.repo_name,
279 279 Repository.fork_id == repo.repo_id))
280 280
281 281 if filter_query:
282 282 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
283 283 query = query.filter(
284 284 Repository.repo_name.ilike(ilike_expression))
285 285
286 286 add_parent = False
287 287 if repo.parent:
288 288 if filter_query in repo.parent.repo_name:
289 289 parent_vcs_obj = repo.parent.scm_instance()
290 290 if parent_vcs_obj and not parent_vcs_obj.is_empty():
291 291 add_parent = True
292 292
293 293 limit = 20 - 1 if add_parent else 20
294 294 all_repos = query.limit(limit).all()
295 295 if add_parent:
296 296 all_repos += [repo.parent]
297 297
298 298 repos = []
299 299 for obj in self.scm_model.get_repos(all_repos):
300 300 repos.append({
301 301 'id': obj['name'],
302 302 'text': obj['name'],
303 303 'type': 'repo',
304 304 'obj': obj['dbrepo']
305 305 })
306 306
307 307 data = {
308 308 'more': False,
309 309 'results': [{
310 310 'text': _('Repositories'),
311 311 'children': repos
312 312 }] if repos else []
313 313 }
314 314 return data
315 315
316 316 @LoginRequired()
317 317 @NotAnonymous()
318 318 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
319 319 'repository.admin')
320 320 @HasAcceptedRepoType('git', 'hg')
321 321 @auth.CSRFRequired()
322 322 def create(self, repo_name):
323 323 repo = Repository.get_by_repo_name(repo_name)
324 324 if not repo:
325 325 raise HTTPNotFound
326 326
327 327 controls = peppercorn.parse(request.POST.items())
328 328
329 329 try:
330 330 _form = PullRequestForm(repo.repo_id)().to_python(controls)
331 331 except formencode.Invalid as errors:
332 332 if errors.error_dict.get('revisions'):
333 333 msg = 'Revisions: %s' % errors.error_dict['revisions']
334 334 elif errors.error_dict.get('pullrequest_title'):
335 335 msg = _('Pull request requires a title with min. 3 chars')
336 336 else:
337 337 msg = _('Error creating pull request: {}').format(errors)
338 338 log.exception(msg)
339 339 h.flash(msg, 'error')
340 340
341 341 # would rather just go back to form ...
342 342 return redirect(url('pullrequest_home', repo_name=repo_name))
343 343
344 344 source_repo = _form['source_repo']
345 345 source_ref = _form['source_ref']
346 346 target_repo = _form['target_repo']
347 347 target_ref = _form['target_ref']
348 348 commit_ids = _form['revisions'][::-1]
349 349 reviewers = [
350 350 (r['user_id'], r['reasons']) for r in _form['review_members']]
351 351
352 352 # find the ancestor for this pr
353 353 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
354 354 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
355 355
356 356 source_scm = source_db_repo.scm_instance()
357 357 target_scm = target_db_repo.scm_instance()
358 358
359 359 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
360 360 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
361 361
362 362 ancestor = source_scm.get_common_ancestor(
363 363 source_commit.raw_id, target_commit.raw_id, target_scm)
364 364
365 365 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
366 366 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
367 367
368 368 pullrequest_title = _form['pullrequest_title']
369 369 title_source_ref = source_ref.split(':', 2)[1]
370 370 if not pullrequest_title:
371 371 pullrequest_title = PullRequestModel().generate_pullrequest_title(
372 372 source=source_repo,
373 373 source_ref=title_source_ref,
374 374 target=target_repo
375 375 )
376 376
377 377 description = _form['pullrequest_desc']
378 378 try:
379 379 pull_request = PullRequestModel().create(
380 380 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
381 381 target_ref, commit_ids, reviewers, pullrequest_title,
382 382 description
383 383 )
384 384 Session().commit()
385 385 h.flash(_('Successfully opened new pull request'),
386 386 category='success')
387 387 except Exception as e:
388 388 msg = _('Error occurred during sending pull request')
389 389 log.exception(msg)
390 390 h.flash(msg, category='error')
391 391 return redirect(url('pullrequest_home', repo_name=repo_name))
392 392
393 393 return redirect(url('pullrequest_show', repo_name=target_repo,
394 394 pull_request_id=pull_request.pull_request_id))
395 395
396 396 @LoginRequired()
397 397 @NotAnonymous()
398 398 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
399 399 'repository.admin')
400 400 @auth.CSRFRequired()
401 401 @jsonify
402 402 def update(self, repo_name, pull_request_id):
403 403 pull_request_id = safe_int(pull_request_id)
404 404 pull_request = PullRequest.get_or_404(pull_request_id)
405 405 # only owner or admin can update it
406 406 allowed_to_update = PullRequestModel().check_user_update(
407 407 pull_request, c.rhodecode_user)
408 408 if allowed_to_update:
409 409 controls = peppercorn.parse(request.POST.items())
410 410
411 411 if 'review_members' in controls:
412 412 self._update_reviewers(
413 413 pull_request_id, controls['review_members'])
414 414 elif str2bool(request.POST.get('update_commits', 'false')):
415 415 self._update_commits(pull_request)
416 416 elif str2bool(request.POST.get('close_pull_request', 'false')):
417 417 self._reject_close(pull_request)
418 418 elif str2bool(request.POST.get('edit_pull_request', 'false')):
419 419 self._edit_pull_request(pull_request)
420 420 else:
421 421 raise HTTPBadRequest()
422 422 return True
423 423 raise HTTPForbidden()
424 424
425 425 def _edit_pull_request(self, pull_request):
426 426 try:
427 427 PullRequestModel().edit(
428 428 pull_request, request.POST.get('title'),
429 429 request.POST.get('description'))
430 430 except ValueError:
431 431 msg = _(u'Cannot update closed pull requests.')
432 432 h.flash(msg, category='error')
433 433 return
434 434 else:
435 435 Session().commit()
436 436
437 437 msg = _(u'Pull request title & description updated.')
438 438 h.flash(msg, category='success')
439 439 return
440 440
441 441 def _update_commits(self, pull_request):
442 442 resp = PullRequestModel().update_commits(pull_request)
443 443
444 444 if resp.executed:
445 445
446 446 if resp.target_changed and resp.source_changed:
447 447 changed = 'target and source repositories'
448 448 elif resp.target_changed and not resp.source_changed:
449 449 changed = 'target repository'
450 450 elif not resp.target_changed and resp.source_changed:
451 451 changed = 'source repository'
452 452 else:
453 453 changed = 'nothing'
454 454
455 455 msg = _(
456 456 u'Pull request updated to "{source_commit_id}" with '
457 457 u'{count_added} added, {count_removed} removed commits. '
458 458 u'Source of changes: {change_source}')
459 459 msg = msg.format(
460 460 source_commit_id=pull_request.source_ref_parts.commit_id,
461 461 count_added=len(resp.changes.added),
462 462 count_removed=len(resp.changes.removed),
463 463 change_source=changed)
464 464 h.flash(msg, category='success')
465 465
466 466 registry = get_current_registry()
467 467 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
468 468 channelstream_config = rhodecode_plugins.get('channelstream', {})
469 469 if channelstream_config.get('enabled'):
470 470 message = msg + (
471 471 ' - <a onclick="window.location.reload()">'
472 472 '<strong>{}</strong></a>'.format(_('Reload page')))
473 473 channel = '/repo${}$/pr/{}'.format(
474 474 pull_request.target_repo.repo_name,
475 475 pull_request.pull_request_id
476 476 )
477 477 payload = {
478 478 'type': 'message',
479 479 'user': 'system',
480 480 'exclude_users': [request.user.username],
481 481 'channel': channel,
482 482 'message': {
483 483 'message': message,
484 484 'level': 'success',
485 485 'topic': '/notifications'
486 486 }
487 487 }
488 488 channelstream_request(
489 489 channelstream_config, [payload], '/message',
490 490 raise_exc=False)
491 491 else:
492 492 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
493 493 warning_reasons = [
494 494 UpdateFailureReason.NO_CHANGE,
495 UpdateFailureReason.WRONG_REF_TPYE,
495 UpdateFailureReason.WRONG_REF_TYPE,
496 496 ]
497 497 category = 'warning' if resp.reason in warning_reasons else 'error'
498 498 h.flash(msg, category=category)
499 499
500 500 @auth.CSRFRequired()
501 501 @LoginRequired()
502 502 @NotAnonymous()
503 503 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
504 504 'repository.admin')
505 505 def merge(self, repo_name, pull_request_id):
506 506 """
507 507 POST /{repo_name}/pull-request/{pull_request_id}
508 508
509 509 Merge will perform a server-side merge of the specified
510 510 pull request, if the pull request is approved and mergeable.
511 511 After successful merging, the pull request is automatically
512 512 closed, with a relevant comment.
513 513 """
514 514 pull_request_id = safe_int(pull_request_id)
515 515 pull_request = PullRequest.get_or_404(pull_request_id)
516 516 user = c.rhodecode_user
517 517
518 518 check = MergeCheck.validate(pull_request, user)
519 519 merge_possible = not check.failed
520 520
521 521 for err_type, error_msg in check.errors:
522 522 h.flash(error_msg, category=err_type)
523 523
524 524 if merge_possible:
525 525 log.debug("Pre-conditions checked, trying to merge.")
526 526 extras = vcs_operation_context(
527 527 request.environ, repo_name=pull_request.target_repo.repo_name,
528 528 username=user.username, action='push',
529 529 scm=pull_request.target_repo.repo_type)
530 530 self._merge_pull_request(pull_request, user, extras)
531 531
532 532 return redirect(url(
533 533 'pullrequest_show',
534 534 repo_name=pull_request.target_repo.repo_name,
535 535 pull_request_id=pull_request.pull_request_id))
536 536
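
For orientation, a hedged sketch of how a client might drive the merge endpoint documented above. The URL shape comes from the method's docstring; the 'csrf_token' form-field name and the cookie-based session are assumptions about the deployment, not facts taken from this diff.

import requests

def merge_pull_request(base_url, repo_name, pull_request_id, csrf_token, cookies):
    # Server-side merge of an approved, mergeable pull request; on success
    # the server closes it and redirects back to the pull request page.
    url = '%s/%s/pull-request/%s' % (base_url, repo_name, pull_request_id)
    return requests.post(url, data={'csrf_token': csrf_token}, cookies=cookies)
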
537 537 def _merge_pull_request(self, pull_request, user, extras):
538 538 merge_resp = PullRequestModel().merge(
539 539 pull_request, user, extras=extras)
540 540
541 541 if merge_resp.executed:
542 542 log.debug("The merge was successful, closing the pull request.")
543 543 PullRequestModel().close_pull_request(
544 544 pull_request.pull_request_id, user)
545 545 Session().commit()
546 546 msg = _('Pull request was successfully merged and closed.')
547 547 h.flash(msg, category='success')
548 548 else:
549 549 log.debug(
550 550 "The merge was not successful. Merge response: %s",
551 551 merge_resp)
552 552 msg = PullRequestModel().merge_status_message(
553 553 merge_resp.failure_reason)
554 554 h.flash(msg, category='error')
555 555
556 556 def _update_reviewers(self, pull_request_id, review_members):
557 557 reviewers = [
558 558 (int(r['user_id']), r['reasons']) for r in review_members]
559 559 PullRequestModel().update_reviewers(pull_request_id, reviewers)
560 560 Session().commit()
561 561
562 562 def _reject_close(self, pull_request):
563 563 if pull_request.is_closed():
564 564 raise HTTPForbidden()
565 565
566 566 PullRequestModel().close_pull_request_with_comment(
567 567 pull_request, c.rhodecode_user, c.rhodecode_db_repo)
568 568 Session().commit()
569 569
570 570 @LoginRequired()
571 571 @NotAnonymous()
572 572 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
573 573 'repository.admin')
574 574 @auth.CSRFRequired()
575 575 @jsonify
576 576 def delete(self, repo_name, pull_request_id):
577 577 pull_request_id = safe_int(pull_request_id)
578 578 pull_request = PullRequest.get_or_404(pull_request_id)
579 579
580 580 pr_closed = pull_request.is_closed()
581 581 allowed_to_delete = PullRequestModel().check_user_delete(
582 582 pull_request, c.rhodecode_user) and not pr_closed
583 583
584 584 # only owner can delete it !
585 585 if allowed_to_delete:
586 586 PullRequestModel().delete(pull_request)
587 587 Session().commit()
588 588 h.flash(_('Successfully deleted pull request'),
589 589 category='success')
590 590 return redirect(url('my_account_pullrequests'))
591 591
592 592 h.flash(_('You are not allowed to delete this pull request'),
593 593 category='error')
594 594 raise HTTPForbidden()
595 595
596 596 def _get_pr_version(self, pull_request_id, version=None):
597 597 pull_request_id = safe_int(pull_request_id)
598 598 at_version = None
599 599
600 600 if version and version == 'latest':
601 601 pull_request_ver = PullRequest.get(pull_request_id)
602 602 pull_request_obj = pull_request_ver
603 603 _org_pull_request_obj = pull_request_obj
604 604 at_version = 'latest'
605 605 elif version:
606 606 pull_request_ver = PullRequestVersion.get_or_404(version)
607 607 pull_request_obj = pull_request_ver
608 608 _org_pull_request_obj = pull_request_ver.pull_request
609 609 at_version = pull_request_ver.pull_request_version_id
610 610 else:
611 611 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(pull_request_id)
612 612
613 613 pull_request_display_obj = PullRequest.get_pr_display_object(
614 614 pull_request_obj, _org_pull_request_obj)
615 615
616 616 return _org_pull_request_obj, pull_request_obj, \
617 617 pull_request_display_obj, at_version
618 618
619 619 def _get_diffset(
620 620 self, source_repo, source_ref_id, target_ref_id, target_commit,
621 621 source_commit, diff_limit, file_limit, display_inline_comments):
622 622 vcs_diff = PullRequestModel().get_diff(
623 623 source_repo, source_ref_id, target_ref_id)
624 624
625 625 diff_processor = diffs.DiffProcessor(
626 626 vcs_diff, format='newdiff', diff_limit=diff_limit,
627 627 file_limit=file_limit, show_full_diff=c.fulldiff)
628 628
629 629 _parsed = diff_processor.prepare()
630 630
631 631 def _node_getter(commit):
632 632 def get_node(fname):
633 633 try:
634 634 return commit.get_node(fname)
635 635 except NodeDoesNotExistError:
636 636 return None
637 637
638 638 return get_node
639 639
640 640 diffset = codeblocks.DiffSet(
641 641 repo_name=c.repo_name,
642 642 source_repo_name=c.source_repo.repo_name,
643 643 source_node_getter=_node_getter(target_commit),
644 644 target_node_getter=_node_getter(source_commit),
645 645 comments=display_inline_comments
646 646 )
647 647 diffset = diffset.render_patchset(
648 648 _parsed, target_commit.raw_id, source_commit.raw_id)
649 649
650 650 return diffset
651 651
652 652 @LoginRequired()
653 653 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
654 654 'repository.admin')
655 655 def show(self, repo_name, pull_request_id):
656 656 pull_request_id = safe_int(pull_request_id)
657 657 version = request.GET.get('version')
658 658 from_version = request.GET.get('from_version') or version
659 659 merge_checks = request.GET.get('merge_checks')
660 660 c.fulldiff = str2bool(request.GET.get('fulldiff'))
661 661
662 662 (pull_request_latest,
663 663 pull_request_at_ver,
664 664 pull_request_display_obj,
665 665 at_version) = self._get_pr_version(
666 666 pull_request_id, version=version)
667 667 pr_closed = pull_request_latest.is_closed()
668 668
669 669 if pr_closed and (version or from_version):
670 670 # do not allow browsing versions
671 671 return redirect(h.url('pullrequest_show', repo_name=repo_name,
672 672 pull_request_id=pull_request_id))
673 673
674 674 versions = pull_request_display_obj.versions()
675 675
676 676 c.at_version = at_version
677 677 c.at_version_num = (at_version
678 678 if at_version and at_version != 'latest'
679 679 else None)
680 680 c.at_version_pos = ChangesetComment.get_index_from_version(
681 681 c.at_version_num, versions)
682 682
683 683 (prev_pull_request_latest,
684 684 prev_pull_request_at_ver,
685 685 prev_pull_request_display_obj,
686 686 prev_at_version) = self._get_pr_version(
687 687 pull_request_id, version=from_version)
688 688
689 689 c.from_version = prev_at_version
690 690 c.from_version_num = (prev_at_version
691 691 if prev_at_version and prev_at_version != 'latest'
692 692 else None)
693 693 c.from_version_pos = ChangesetComment.get_index_from_version(
694 694 c.from_version_num, versions)
695 695
696 696 # define if we're in COMPARE mode or VIEW at version mode
697 697 compare = at_version != prev_at_version
698 698
699 699 # the repo_name this pull request was opened against,
700 700 # i.e. the target_repo must match
701 701 if repo_name != pull_request_at_ver.target_repo.repo_name:
702 702 raise HTTPNotFound
703 703
704 704 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
705 705 pull_request_at_ver)
706 706
707 707 c.pull_request = pull_request_display_obj
708 708 c.pull_request_latest = pull_request_latest
709 709
710 710 if compare or (at_version and not at_version == 'latest'):
711 711 c.allowed_to_change_status = False
712 712 c.allowed_to_update = False
713 713 c.allowed_to_merge = False
714 714 c.allowed_to_delete = False
715 715 c.allowed_to_comment = False
716 716 c.allowed_to_close = False
717 717 else:
718 718 c.allowed_to_change_status = PullRequestModel(). \
719 719 check_user_change_status(pull_request_at_ver, c.rhodecode_user) \
720 720 and not pr_closed
721 721
722 722 c.allowed_to_update = PullRequestModel().check_user_update(
723 723 pull_request_latest, c.rhodecode_user) and not pr_closed
724 724 c.allowed_to_merge = PullRequestModel().check_user_merge(
725 725 pull_request_latest, c.rhodecode_user) and not pr_closed
726 726 c.allowed_to_delete = PullRequestModel().check_user_delete(
727 727 pull_request_latest, c.rhodecode_user) and not pr_closed
728 728 c.allowed_to_comment = not pr_closed
729 729 c.allowed_to_close = c.allowed_to_merge and not pr_closed
730 730
731 731 # check merge capabilities
732 732 _merge_check = MergeCheck.validate(
733 733 pull_request_latest, user=c.rhodecode_user)
734 734 c.pr_merge_errors = _merge_check.error_details
735 735 c.pr_merge_possible = not _merge_check.failed
736 736 c.pr_merge_message = _merge_check.merge_msg
737 737
738 738 c.pull_request_review_status = _merge_check.review_status
739 739 if merge_checks:
740 740 return render('/pullrequests/pullrequest_merge_checks.mako')
741 741
742 742 comments_model = CommentsModel()
743 743
744 744 # reviewers and statuses
745 745 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
746 746 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
747 747
748 748 # GENERAL COMMENTS with versions #
749 749 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
750 750 q = q.order_by(ChangesetComment.comment_id.asc())
751 751 general_comments = q.order_by(ChangesetComment.pull_request_version_id.asc())
752 752
753 753 # pick comments we want to render at current version
754 754 c.comment_versions = comments_model.aggregate_comments(
755 755 general_comments, versions, c.at_version_num)
756 756 c.comments = c.comment_versions[c.at_version_num]['until']
757 757
758 758 # INLINE COMMENTS with versions #
759 759 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
760 760 q = q.order_by(ChangesetComment.comment_id.asc())
761 761 inline_comments = q.order_by(ChangesetComment.pull_request_version_id.asc())
762 762 c.inline_versions = comments_model.aggregate_comments(
763 763 inline_comments, versions, c.at_version_num, inline=True)
764 764
765 765 # inject latest version
766 766 latest_ver = PullRequest.get_pr_display_object(
767 767 pull_request_latest, pull_request_latest)
768 768
769 769 c.versions = versions + [latest_ver]
770 770
771 771 # if we use version, then do not show later comments
772 772 # than current version
773 773 display_inline_comments = collections.defaultdict(
774 774 lambda: collections.defaultdict(list))
775 775 for co in inline_comments:
776 776 if c.at_version_num:
777 777 # pick comments that are at least UPTO given version, so we
778 778 # don't render comments for higher version
779 779 should_render = co.pull_request_version_id and \
780 780 co.pull_request_version_id <= c.at_version_num
781 781 else:
782 782 # showing all, for 'latest'
783 783 should_render = True
784 784
785 785 if should_render:
786 786 display_inline_comments[co.f_path][co.line_no].append(co)
787 787
788 788 # load diff data into template context, if we use compare mode then
789 789 # diff is calculated based on changes between versions of PR
790 790
791 791 source_repo = pull_request_at_ver.source_repo
792 792 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
793 793
794 794 target_repo = pull_request_at_ver.target_repo
795 795 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
796 796
797 797 if compare:
798 798 # in compare switch the diff base to latest commit from prev version
799 799 target_ref_id = prev_pull_request_display_obj.revisions[0]
800 800
801 801 # despite opening commits for bookmarks/branches/tags, we always
802 802 # convert this to rev to prevent changes after bookmark or branch change
803 803 c.source_ref_type = 'rev'
804 804 c.source_ref = source_ref_id
805 805
806 806 c.target_ref_type = 'rev'
807 807 c.target_ref = target_ref_id
808 808
809 809 c.source_repo = source_repo
810 810 c.target_repo = target_repo
811 811
812 812 # diff_limit is the old behavior: it will cut off the whole diff
813 813 # if the limit is applied; otherwise it will just hide the
814 814 # big files from the front-end
815 815 diff_limit = self.cut_off_limit_diff
816 816 file_limit = self.cut_off_limit_file
817 817
818 818 c.commit_ranges = []
819 819 source_commit = EmptyCommit()
820 820 target_commit = EmptyCommit()
821 821 c.missing_requirements = False
822 822
823 823 source_scm = source_repo.scm_instance()
824 824 target_scm = target_repo.scm_instance()
825 825
826 826 # try first shadow repo, fallback to regular repo
827 827 try:
828 828 commits_source_repo = pull_request_latest.get_shadow_repo()
829 829 except Exception:
830 830 log.debug('Failed to get shadow repo', exc_info=True)
831 831 commits_source_repo = source_scm
832 832
833 833 c.commits_source_repo = commits_source_repo
834 834 commit_cache = {}
835 835 try:
836 836 pre_load = ["author", "branch", "date", "message"]
837 837 show_revs = pull_request_at_ver.revisions
838 838 for rev in show_revs:
839 839 comm = commits_source_repo.get_commit(
840 840 commit_id=rev, pre_load=pre_load)
841 841 c.commit_ranges.append(comm)
842 842 commit_cache[comm.raw_id] = comm
843 843
844 844 target_commit = commits_source_repo.get_commit(
845 845 commit_id=safe_str(target_ref_id))
846 846 source_commit = commits_source_repo.get_commit(
847 847 commit_id=safe_str(source_ref_id))
848 848 except CommitDoesNotExistError:
849 849 pass
850 850 except RepositoryRequirementError:
851 851 log.warning(
852 852 'Failed to get all required data from repo', exc_info=True)
853 853 c.missing_requirements = True
854 854
855 855 c.ancestor = None # set it to None, to hide it from PR view
856 856
857 857 try:
858 858 ancestor_id = source_scm.get_common_ancestor(
859 859 source_commit.raw_id, target_commit.raw_id, target_scm)
860 860 c.ancestor_commit = source_scm.get_commit(ancestor_id)
861 861 except Exception:
862 862 c.ancestor_commit = None
863 863
864 864 c.statuses = source_repo.statuses(
865 865 [x.raw_id for x in c.commit_ranges])
866 866
867 867 # auto collapse if we have more than limit
868 868 collapse_limit = diffs.DiffProcessor._collapse_commits_over
869 869 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
870 870 c.compare_mode = compare
871 871
872 872 c.missing_commits = False
873 873 if (c.missing_requirements or isinstance(source_commit, EmptyCommit)
874 874 or source_commit == target_commit):
875 875
876 876 c.missing_commits = True
877 877 else:
878 878
879 879 c.diffset = self._get_diffset(
880 880 commits_source_repo, source_ref_id, target_ref_id,
881 881 target_commit, source_commit,
882 882 diff_limit, file_limit, display_inline_comments)
883 883
884 884 c.limited_diff = c.diffset.limited_diff
885 885
886 886 # calculate removed files that are bound to comments
887 887 comment_deleted_files = [
888 888 fname for fname in display_inline_comments
889 889 if fname not in c.diffset.file_stats]
890 890
891 891 c.deleted_files_comments = collections.defaultdict(dict)
892 892 for fname, per_line_comments in display_inline_comments.items():
893 893 if fname in comment_deleted_files:
894 894 c.deleted_files_comments[fname]['stats'] = 0
895 895 c.deleted_files_comments[fname]['comments'] = list()
896 896 for lno, comments in per_line_comments.items():
897 897 c.deleted_files_comments[fname]['comments'].extend(
898 898 comments)
899 899
900 900 # this is a hack to properly display links, when creating PR, the
901 901 # compare view and others use different notation, and
902 902 # compare_commits.mako renders links based on the target_repo.
903 903 # We need to swap that here to generate it properly on the html side
904 904 c.target_repo = c.source_repo
905 905
906 906 c.commit_statuses = ChangesetStatus.STATUSES
907 907
908 908 c.show_version_changes = not pr_closed
909 909 if c.show_version_changes:
910 910 cur_obj = pull_request_at_ver
911 911 prev_obj = prev_pull_request_at_ver
912 912
913 913 old_commit_ids = prev_obj.revisions
914 914 new_commit_ids = cur_obj.revisions
915 915 commit_changes = PullRequestModel()._calculate_commit_id_changes(
916 916 old_commit_ids, new_commit_ids)
917 917 c.commit_changes_summary = commit_changes
918 918
919 919 # calculate the diff for commits between versions
920 920 c.commit_changes = []
921 921 mark = lambda cs, fw: list(
922 922 h.itertools.izip_longest([], cs, fillvalue=fw))
923 923 for c_type, raw_id in mark(commit_changes.added, 'a') \
924 924 + mark(commit_changes.removed, 'r') \
925 925 + mark(commit_changes.common, 'c'):
926 926
927 927 if raw_id in commit_cache:
928 928 commit = commit_cache[raw_id]
929 929 else:
930 930 try:
931 931 commit = commits_source_repo.get_commit(raw_id)
932 932 except CommitDoesNotExistError:
933 933 # in case we fail extracting still use "dummy" commit
934 934 # for display in commit diff
935 935 commit = h.AttributeDict(
936 936 {'raw_id': raw_id,
937 937 'message': 'EMPTY or MISSING COMMIT'})
938 938 c.commit_changes.append([c_type, commit])
939 939
940 940 # current user review statuses for each version
941 941 c.review_versions = {}
942 942 if c.rhodecode_user.user_id in allowed_reviewers:
943 943 for co in general_comments:
944 944 if co.author.user_id == c.rhodecode_user.user_id:
945 945 # each comment has a status change
946 946 status = co.status_change
947 947 if status:
948 948 _ver_pr = status[0].comment.pull_request_version_id
949 949 c.review_versions[_ver_pr] = status[0]
950 950
951 951 return render('/pullrequests/pullrequest_show.mako')
952 952
953 953 @LoginRequired()
954 954 @NotAnonymous()
955 955 @HasRepoPermissionAnyDecorator(
956 956 'repository.read', 'repository.write', 'repository.admin')
957 957 @auth.CSRFRequired()
958 958 @jsonify
959 959 def comment(self, repo_name, pull_request_id):
960 960 pull_request_id = safe_int(pull_request_id)
961 961 pull_request = PullRequest.get_or_404(pull_request_id)
962 962 if pull_request.is_closed():
963 963 raise HTTPForbidden()
964 964
965 965 status = request.POST.get('changeset_status', None)
966 966 text = request.POST.get('text')
967 967 comment_type = request.POST.get('comment_type')
968 968 resolves_comment_id = request.POST.get('resolves_comment_id', None)
969 969 close_pull_request = request.POST.get('close_pull_request')
970 970
971 971 close_pr = False
972 972 # only owner or admin or person with write permissions
973 973 allowed_to_close = PullRequestModel().check_user_update(
974 974 pull_request, c.rhodecode_user)
975 975
976 976 if close_pull_request and allowed_to_close:
977 977 close_pr = True
978 978 pull_request_review_status = pull_request.calculated_review_status()
979 979 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
980 980 # approved only if we have voting consent
981 981 status = ChangesetStatus.STATUS_APPROVED
982 982 else:
983 983 status = ChangesetStatus.STATUS_REJECTED
984 984
985 985 allowed_to_change_status = PullRequestModel().check_user_change_status(
986 986 pull_request, c.rhodecode_user)
987 987
988 988 if status and allowed_to_change_status:
989 989 message = (_('Status change %(transition_icon)s %(status)s')
990 990 % {'transition_icon': '>',
991 991 'status': ChangesetStatus.get_status_lbl(status)})
992 992 if close_pr:
993 993 message = _('Closing with') + ' ' + message
994 994 text = text or message
995 995 comm = CommentsModel().create(
996 996 text=text,
997 997 repo=c.rhodecode_db_repo.repo_id,
998 998 user=c.rhodecode_user.user_id,
999 999 pull_request=pull_request_id,
1000 1000 f_path=request.POST.get('f_path'),
1001 1001 line_no=request.POST.get('line'),
1002 1002 status_change=(ChangesetStatus.get_status_lbl(status)
1003 1003 if status and allowed_to_change_status else None),
1004 1004 status_change_type=(status
1005 1005 if status and allowed_to_change_status else None),
1006 1006 closing_pr=close_pr,
1007 1007 comment_type=comment_type,
1008 1008 resolves_comment_id=resolves_comment_id
1009 1009 )
1010 1010
1011 1011 if allowed_to_change_status:
1012 1012 old_calculated_status = pull_request.calculated_review_status()
1013 1013 # get status if set !
1014 1014 if status:
1015 1015 ChangesetStatusModel().set_status(
1016 1016 c.rhodecode_db_repo.repo_id,
1017 1017 status,
1018 1018 c.rhodecode_user.user_id,
1019 1019 comm,
1020 1020 pull_request=pull_request_id
1021 1021 )
1022 1022
1023 1023 Session().flush()
1024 1024 events.trigger(events.PullRequestCommentEvent(pull_request, comm))
1025 1025 # we now calculate the status of pull request, and based on that
1026 1026 # calculation we set the commits status
1027 1027 calculated_status = pull_request.calculated_review_status()
1028 1028 if old_calculated_status != calculated_status:
1029 1029 PullRequestModel()._trigger_pull_request_hook(
1030 1030 pull_request, c.rhodecode_user, 'review_status_change')
1031 1031
1032 1032 calculated_status_lbl = ChangesetStatus.get_status_lbl(
1033 1033 calculated_status)
1034 1034
1035 1035 if close_pr:
1036 1036 status_completed = (
1037 1037 calculated_status in [ChangesetStatus.STATUS_APPROVED,
1038 1038 ChangesetStatus.STATUS_REJECTED])
1039 1039 if close_pull_request or status_completed:
1040 1040 PullRequestModel().close_pull_request(
1041 1041 pull_request_id, c.rhodecode_user)
1042 1042 else:
1043 1043 h.flash(_('Closing pull request on other statuses than '
1044 1044 'rejected or approved is forbidden. '
1045 1045 'Calculated status from all reviewers '
1046 1046 'is currently: %s') % calculated_status_lbl,
1047 1047 category='warning')
1048 1048
1049 1049 Session().commit()
1050 1050
1051 1051 if not request.is_xhr:
1052 1052 return redirect(h.url('pullrequest_show', repo_name=repo_name,
1053 1053 pull_request_id=pull_request_id))
1054 1054
1055 1055 data = {
1056 1056 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
1057 1057 }
1058 1058 if comm:
1059 1059 c.co = comm
1060 1060 c.inline_comment = True if comm.line_no else False
1061 1061 data.update(comm.get_dict())
1062 1062 data.update({'rendered_text':
1063 1063 render('changeset/changeset_comment_block.mako')})
1064 1064
1065 1065 return data
1066 1066
1067 1067 @LoginRequired()
1068 1068 @NotAnonymous()
1069 1069 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
1070 1070 'repository.admin')
1071 1071 @auth.CSRFRequired()
1072 1072 @jsonify
1073 1073 def delete_comment(self, repo_name, comment_id):
1074 1074 return self._delete_comment(comment_id)
1075 1075
1076 1076 def _delete_comment(self, comment_id):
1077 1077 comment_id = safe_int(comment_id)
1078 1078 co = ChangesetComment.get_or_404(comment_id)
1079 1079 if co.pull_request.is_closed():
1080 1080 # don't allow deleting comments on closed pull request
1081 1081 raise HTTPForbidden()
1082 1082
1083 1083 is_owner = co.author.user_id == c.rhodecode_user.user_id
1084 1084 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
1085 1085 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
1086 1086 old_calculated_status = co.pull_request.calculated_review_status()
1087 1087 CommentsModel().delete(comment=co)
1088 1088 Session().commit()
1089 1089 calculated_status = co.pull_request.calculated_review_status()
1090 1090 if old_calculated_status != calculated_status:
1091 1091 PullRequestModel()._trigger_pull_request_hook(
1092 1092 co.pull_request, c.rhodecode_user, 'review_status_change')
1093 1093 return True
1094 1094 else:
1095 1095 raise HTTPForbidden()
@@ -1,1588 +1,1588 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import itertools
28 28 import logging
29 29 import os
30 30 import time
31 31 import warnings
32 32
33 33 from zope.cachedescriptors.property import Lazy as LazyProperty
34 34
35 35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 36 from rhodecode.lib.vcs import connection
37 37 from rhodecode.lib.vcs.utils import author_name, author_email
38 38 from rhodecode.lib.vcs.conf import settings
39 39 from rhodecode.lib.vcs.exceptions import (
40 40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 44 RepositoryError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 FILEMODE_DEFAULT = 0100644
51 51 FILEMODE_EXECUTABLE = 0100755
52 52
53 53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 54 MergeResponse = collections.namedtuple(
55 55 'MergeResponse',
56 56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
57 57
58 58
59 59 class MergeFailureReason(object):
60 60 """
61 61 Enumeration with all the reasons why the server side merge could fail.
62 62
63 63 DO NOT change the number of the reasons, as they may be stored in the
64 64 database.
65 65
66 66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 67 reasons.
68 68 """
69 69
70 70 # Everything went well.
71 71 NONE = 0
72 72
73 73 # An unexpected exception was raised. Check the logs for more details.
74 74 UNKNOWN = 1
75 75
76 76 # The merge was not successful, there are conflicts.
77 77 MERGE_FAILED = 2
78 78
79 79 # The merge succeeded but we could not push it to the target repository.
80 80 PUSH_FAILED = 3
81 81
82 82 # The specified target is not a head in the target repository.
83 83 TARGET_IS_NOT_HEAD = 4
84 84
85 85 # The source repository contains more branches than the target. Pushing
86 86 # the merge will create additional branches in the target.
87 87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88 88
89 89 # The target reference has multiple heads. That does not allow us to correctly
90 90 # identify the target location. This could only happen for mercurial
91 91 # branches.
92 92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93 93
94 94 # The target repository is locked
95 95 TARGET_IS_LOCKED = 7
96 96
97 97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
98 98 # An involved commit could not be found.
99 99 _DEPRECATED_MISSING_COMMIT = 8
100 100
101 101 # The target repo reference is missing.
102 102 MISSING_TARGET_REF = 9
103 103
104 104 # The source repo reference is missing.
105 105 MISSING_SOURCE_REF = 10
106 106
107 107 # The merge was not successful, there are conflicts related to sub
108 108 # repositories.
109 109 SUBREPO_MERGE_FAILED = 11
110 110
111 111
112 112 class UpdateFailureReason(object):
113 113 """
114 114 Enumeration with all the reasons why the pull request update could fail.
115 115
116 116 DO NOT change the number of the reasons, as they may be stored in the
117 117 database.
118 118
119 119 Changing the name of a reason is acceptable and encouraged to deprecate old
120 120 reasons.
121 121 """
122 122
123 123 # Everything went well.
124 124 NONE = 0
125 125
126 126 # An unexpected exception was raised. Check the logs for more details.
127 127 UNKNOWN = 1
128 128
129 129 # The pull request is up to date.
130 130 NO_CHANGE = 2
131 131
132 132 # The pull request has a reference type that is not supported for update.
133 WRONG_REF_TPYE = 3
133 WRONG_REF_TYPE = 3
134 134
135 135 # Update failed because the target reference is missing.
136 136 MISSING_TARGET_REF = 4
137 137
138 138 # Update failed because the source reference is missing.
139 139 MISSING_SOURCE_REF = 5
140 140
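
Both failure enumerations above note that the numeric values may already be stored in the database, which is why renames like WRONG_REF_TPYE to WRONG_REF_TYPE are safe while the numbers themselves must never change. An illustrative helper (not part of the original module) that resolves a persisted value back to whatever the constant is currently named:

def reason_name(enum_cls, stored_value):
    # Walk the public attributes of the enumeration class and return the
    # current name for a value read back from the database.
    for name, value in vars(enum_cls).items():
        if not name.startswith('_') and value == stored_value:
            return name
    return 'UNKNOWN_REASON_%s' % stored_value

# e.g. reason_name(UpdateFailureReason, 3) -> 'WRONG_REF_TYPE'
#      reason_name(MergeFailureReason, 2)  -> 'MERGE_FAILED'
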
141 141
142 142 class BaseRepository(object):
143 143 """
144 144 Base Repository for final backends
145 145
146 146 .. attribute:: DEFAULT_BRANCH_NAME
147 147
148 148 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
149 149
150 150 .. attribute:: commit_ids
151 151
152 152 list of all available commit ids, in ascending order
153 153
154 154 .. attribute:: path
155 155
156 156 absolute path to the repository
157 157
158 158 .. attribute:: bookmarks
159 159
160 160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
161 161 there are no bookmarks or the backend implementation does not support
162 162 bookmarks.
163 163
164 164 .. attribute:: tags
165 165
166 166 Mapping from name to :term:`Commit ID` of the tag.
167 167
168 168 """
169 169
170 170 DEFAULT_BRANCH_NAME = None
171 171 DEFAULT_CONTACT = u"Unknown"
172 172 DEFAULT_DESCRIPTION = u"unknown"
173 173 EMPTY_COMMIT_ID = '0' * 40
174 174
175 175 path = None
176 176
177 177 def __init__(self, repo_path, config=None, create=False, **kwargs):
178 178 """
179 179 Initializes repository. Raises RepositoryError if repository could
180 180 not be found at the given ``repo_path`` or directory at ``repo_path``
181 181 exists and ``create`` is set to True.
182 182
183 183 :param repo_path: local path of the repository
184 184 :param config: repository configuration
185 185 :param create=False: if set to True, would try to create repository.
186 186 :param src_url=None: if set, should be proper url from which repository
187 187 would be cloned; requires ``create`` parameter to be set to True -
188 188 raises RepositoryError if src_url is set and create evaluates to
189 189 False
190 190 """
191 191 raise NotImplementedError
192 192
193 193 def __repr__(self):
194 194 return '<%s at %s>' % (self.__class__.__name__, self.path)
195 195
196 196 def __len__(self):
197 197 return self.count()
198 198
199 199 def __eq__(self, other):
200 200 same_instance = isinstance(other, self.__class__)
201 201 return same_instance and other.path == self.path
202 202
203 203 def __ne__(self, other):
204 204 return not self.__eq__(other)
205 205
206 206 @LazyProperty
207 207 def EMPTY_COMMIT(self):
208 208 return EmptyCommit(self.EMPTY_COMMIT_ID)
209 209
210 210 @LazyProperty
211 211 def alias(self):
212 212 for k, v in settings.BACKENDS.items():
213 213 if v.split('.')[-1] == str(self.__class__.__name__):
214 214 return k
215 215
216 216 @LazyProperty
217 217 def name(self):
218 218 return safe_unicode(os.path.basename(self.path))
219 219
220 220 @LazyProperty
221 221 def description(self):
222 222 raise NotImplementedError
223 223
224 224 def refs(self):
225 225 """
226 226 returns a `dict` with branches, bookmarks, tags, and closed_branches
227 227 for this repository
228 228 """
229 229 return dict(
230 230 branches=self.branches,
231 231 branches_closed=self.branches_closed,
232 232 tags=self.tags,
233 233 bookmarks=self.bookmarks
234 234 )
235 235
236 236 @LazyProperty
237 237 def branches(self):
238 238 """
239 239 A `dict` which maps branch names to commit ids.
240 240 """
241 241 raise NotImplementedError
242 242
243 243 @LazyProperty
244 244 def tags(self):
245 245 """
246 246 A `dict` which maps tag names to commit ids.
247 247 """
248 248 raise NotImplementedError
249 249
250 250 @LazyProperty
251 251 def size(self):
252 252 """
253 253 Returns combined size in bytes for all repository files
254 254 """
255 255 tip = self.get_commit()
256 256 return tip.size
257 257
258 258 def size_at_commit(self, commit_id):
259 259 commit = self.get_commit(commit_id)
260 260 return commit.size
261 261
262 262 def is_empty(self):
263 263 return not bool(self.commit_ids)
264 264
265 265 @staticmethod
266 266 def check_url(url, config):
267 267 """
268 268 Function will check given url and try to verify if it's a valid
269 269 link.
270 270 """
271 271 raise NotImplementedError
272 272
273 273 @staticmethod
274 274 def is_valid_repository(path):
275 275 """
276 276 Check if given `path` contains a valid repository of this backend
277 277 """
278 278 raise NotImplementedError
279 279
280 280 # ==========================================================================
281 281 # COMMITS
282 282 # ==========================================================================
283 283
284 284 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
285 285 """
286 286 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
287 287 are both None, most recent commit is returned.
288 288
289 289 :param pre_load: Optional. List of commit attributes to load.
290 290
291 291 :raises ``EmptyRepositoryError``: if there are no commits
292 292 """
293 293 raise NotImplementedError
294 294
295 295 def __iter__(self):
296 296 for commit_id in self.commit_ids:
297 297 yield self.get_commit(commit_id=commit_id)
298 298
299 299 def get_commits(
300 300 self, start_id=None, end_id=None, start_date=None, end_date=None,
301 301 branch_name=None, pre_load=None):
302 302 """
303 303 Returns iterator of `BaseCommit` objects from start to end
304 304 not inclusive. This should behave just like a list, ie. end is not
305 305 inclusive.
306 306
307 307 :param start_id: None or str, must be a valid commit id
308 308 :param end_id: None or str, must be a valid commit id
309 309 :param start_date:
310 310 :param end_date:
311 311 :param branch_name:
312 312 :param pre_load:
313 313 """
314 314 raise NotImplementedError
315 315
316 316 def __getitem__(self, key):
317 317 """
318 318 Allows index based access to the commit objects of this repository.
319 319 """
320 320 pre_load = ["author", "branch", "date", "message", "parents"]
321 321 if isinstance(key, slice):
322 322 return self._get_range(key, pre_load)
323 323 return self.get_commit(commit_idx=key, pre_load=pre_load)
324 324
325 325 def _get_range(self, slice_obj, pre_load):
326 326 for commit_id in self.commit_ids.__getitem__(slice_obj):
327 327 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
328 328
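
A short usage sketch of the index and slice access defined above; `repo` stands for any concrete backend instance, and obtaining one is backend-specific and outside this diff.

def walk_recent_commits(repo, limit=5):
    # Integer keys go through get_commit(commit_idx=...); slices are served
    # lazily by _get_range(). commit_ids are ascending, so index 0 is oldest.
    oldest = repo[0]
    print('oldest: %s' % oldest.raw_id)
    for commit in repo[-limit:]:
        print('%s %s' % (commit.raw_id, commit.message))
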
329 329 def count(self):
330 330 return len(self.commit_ids)
331 331
332 332 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
333 333 """
334 334 Creates and returns a tag for the given ``commit_id``.
335 335
336 336 :param name: name for new tag
337 337 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
338 338 :param commit_id: commit id for which new tag would be created
339 339 :param message: message of the tag's commit
340 340 :param date: date of tag's commit
341 341
342 342 :raises TagAlreadyExistError: if tag with same name already exists
343 343 """
344 344 raise NotImplementedError
345 345
346 346 def remove_tag(self, name, user, message=None, date=None):
347 347 """
348 348 Removes tag with the given ``name``.
349 349
350 350 :param name: name of the tag to be removed
351 351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 352 :param message: message of the tag's removal commit
353 353 :param date: date of tag's removal commit
354 354
355 355 :raises TagDoesNotExistError: if tag with given name does not exist
356 356 """
357 357 raise NotImplementedError
358 358
359 359 def get_diff(
360 360 self, commit1, commit2, path=None, ignore_whitespace=False,
361 361 context=3, path1=None):
362 362 """
363 363 Returns (git like) *diff*, as plain text. Shows changes introduced by
364 364 `commit2` since `commit1`.
365 365
366 366 :param commit1: Entry point from which diff is shown. Can be
367 367 ``self.EMPTY_COMMIT`` - in this case, patch showing all
368 368 the changes since empty state of the repository until `commit2`
369 369 :param commit2: Until which commit changes should be shown.
370 370 :param path: Can be set to a path of a file to create a diff of that
371 371 file. If `path1` is also set, this value is only associated to
372 372 `commit2`.
373 373 :param ignore_whitespace: If set to ``True``, would not show whitespace
374 374 changes. Defaults to ``False``.
375 375 :param context: How many lines before/after changed lines should be
376 376 shown. Defaults to ``3``.
377 377 :param path1: Can be set to a path to associate with `commit1`. This
378 378 parameter works only for backends which support diff generation for
379 379 different paths. Other backends will raise a `ValueError` if `path1`
380 380 is set and has a different value than `path`.
381 381 :param file_path: filter this diff by given path pattern
382 382 """
383 383 raise NotImplementedError
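    # Usage sketch with two commits resolved from the same repository; the
    # indices and path are illustrative only:
    #
    #     commit1 = repo.get_commit(commit_idx=10)
    #     commit2 = repo.get_commit(commit_idx=20)
    #     raw_diff = repo.get_diff(
    #         commit1, commit2, path='setup.py',
    #         ignore_whitespace=True, context=5)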
384 384
385 385 def strip(self, commit_id, branch=None):
386 386 """
387 387 Strip given commit_id from the repository
388 388 """
389 389 raise NotImplementedError
390 390
391 391 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
392 392 """
393 393 Returns the latest common ancestor commit if one exists for this repo
394 394 `commit_id1` vs `commit_id2` from `repo2`.
395 395
396 396 :param commit_id1: Commit id from this repository to use as a
397 397 target for the comparison.
398 398 :param commit_id2: Source commit id to use for comparison.
399 399 :param repo2: Source repository to use for comparison.
400 400 """
401 401 raise NotImplementedError
402 402
403 403 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
404 404 """
405 405 Compare this repository's revision `commit_id1` with `commit_id2`.
406 406
407 407 Returns a tuple(commits, ancestor) that would be merged from
408 408 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
409 409 will be returned as ancestor.
410 410
411 411 :param commit_id1: Commit id from this repository to use as a
412 412 target for the comparison.
413 413 :param commit_id2: Source commit id to use for comparison.
414 414 :param repo2: Source repository to use for comparison.
415 415 :param merge: If set to ``True`` will do a merge compare which also
416 416 returns the common ancestor.
417 417 :param pre_load: Optional. List of commit attributes to load.
418 418 """
419 419 raise NotImplementedError
420 420
421 421 def merge(self, target_ref, source_repo, source_ref, workspace_id,
422 422 user_name='', user_email='', message='', dry_run=False,
423 423 use_rebase=False):
424 424 """
425 425 Merge the revisions specified in `source_ref` from `source_repo`
426 426 onto the `target_ref` of this repository.
427 427
428 428 `source_ref` and `target_ref` are named tuples with the following
429 429 fields `type`, `name` and `commit_id`.
430 430
431 431 Returns a MergeResponse named tuple with the following fields
432 432 'possible', 'executed', 'source_commit', 'target_commit',
433 433 'merge_commit'.
434 434
435 435 :param target_ref: `target_ref` points to the commit on top of which
436 436 the `source_ref` should be merged.
437 437 :param source_repo: The repository that contains the commits to be
438 438 merged.
439 439 :param source_ref: `source_ref` points to the topmost commit from
440 440 the `source_repo` which should be merged.
441 441 :param workspace_id: `workspace_id` unique identifier.
442 442 :param user_name: Merge commit `user_name`.
443 443 :param user_email: Merge commit `user_email`.
444 444 :param message: Merge commit `message`.
445 445 :param dry_run: If `True` the merge will not take place.
446 446 :param use_rebase: If `True` commits from the source will be rebased
447 447 on top of the target instead of being merged.
448 448 """
449 449 if dry_run:
450 450 message = message or 'dry_run_merge_message'
451 451 user_email = user_email or 'dry-run-merge@rhodecode.com'
452 452 user_name = user_name or 'Dry-Run User'
453 453 else:
454 454 if not user_name:
455 455 raise ValueError('user_name cannot be empty')
456 456 if not user_email:
457 457 raise ValueError('user_email cannot be empty')
458 458 if not message:
459 459 raise ValueError('message cannot be empty')
460 460
461 461 shadow_repository_path = self._maybe_prepare_merge_workspace(
462 462 workspace_id, target_ref)
463 463
464 464 try:
465 465 return self._merge_repo(
466 466 shadow_repository_path, target_ref, source_repo,
467 467 source_ref, message, user_name, user_email, dry_run=dry_run,
468 468 use_rebase=use_rebase)
469 469 except RepositoryError:
470 470 log.exception(
471 471 'Unexpected failure when running merge, dry-run=%s',
472 472 dry_run)
473 473 return MergeResponse(
474 474 False, False, None, MergeFailureReason.UNKNOWN)
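    # Dry-run sketch; ``Reference`` is the (type, name, commit_id) named tuple
    # described above, and the branch names / workspace id are illustrative:
    #
    #     target = Reference('branch', 'default', target_commit_id)
    #     source = Reference('branch', 'feature', source_commit_id)
    #     response = target_repo.merge(
    #         target, source_repo, source, workspace_id='pr-42', dry_run=True)
    #     if response.possible:
    #         pass  # safe to attempt the real merge with user/message data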
475 475
476 476 def _merge_repo(self, shadow_repository_path, target_ref,
477 477 source_repo, source_ref, merge_message,
478 478 merger_name, merger_email, dry_run=False, use_rebase=False):
479 479 """Internal implementation of merge."""
480 480 raise NotImplementedError
481 481
482 482 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
483 483 """
484 484 Create the merge workspace.
485 485
486 486 :param workspace_id: `workspace_id` unique identifier.
487 487 """
488 488 raise NotImplementedError
489 489
490 490 def cleanup_merge_workspace(self, workspace_id):
491 491 """
492 492 Remove merge workspace.
493 493
494 494 This function MUST not fail in case there is no workspace associated with
495 495 the given `workspace_id`.
496 496
497 497 :param workspace_id: `workspace_id` unique identifier.
498 498 """
499 499 raise NotImplementedError
500 500
501 501 # ========== #
502 502 # COMMIT API #
503 503 # ========== #
504 504
505 505 @LazyProperty
506 506 def in_memory_commit(self):
507 507 """
508 508 Returns :class:`InMemoryCommit` object for this repository.
509 509 """
510 510 raise NotImplementedError
511 511
512 512 # ======================== #
513 513 # UTILITIES FOR SUBCLASSES #
514 514 # ======================== #
515 515
516 516 def _validate_diff_commits(self, commit1, commit2):
517 517 """
518 518 Validates that the given commits are related to this repository.
519 519
520 520 Intended as a utility for subclasses to have a consistent validation
521 521 of input parameters in methods like :meth:`get_diff`.
522 522 """
523 523 self._validate_commit(commit1)
524 524 self._validate_commit(commit2)
525 525 if (isinstance(commit1, EmptyCommit) and
526 526 isinstance(commit2, EmptyCommit)):
527 527 raise ValueError("Cannot compare two empty commits")
528 528
529 529 def _validate_commit(self, commit):
530 530 if not isinstance(commit, BaseCommit):
531 531 raise TypeError(
532 532 "%s is not of type BaseCommit" % repr(commit))
533 533 if commit.repository != self and not isinstance(commit, EmptyCommit):
534 534 raise ValueError(
535 535 "Commit %s must be a valid commit from this repository %s, "
536 536 "related to this repository instead %s." %
537 537 (commit, self, commit.repository))
538 538
539 539 def _validate_commit_id(self, commit_id):
540 540 if not isinstance(commit_id, basestring):
541 541 raise TypeError("commit_id must be a string value")
542 542
543 543 def _validate_commit_idx(self, commit_idx):
544 544 if not isinstance(commit_idx, (int, long)):
545 545 raise TypeError("commit_idx must be a numeric value")
546 546
547 547 def _validate_branch_name(self, branch_name):
548 548 if branch_name and branch_name not in self.branches_all:
549 549 msg = ("Branch %s not found in %s" % (branch_name, self))
550 550 raise BranchDoesNotExistError(msg)
551 551
552 552 #
553 553 # Supporting deprecated API parts
554 554 # TODO: johbo: consider to move this into a mixin
555 555 #
556 556
557 557 @property
558 558 def EMPTY_CHANGESET(self):
559 559 warnings.warn(
560 560 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
561 561 return self.EMPTY_COMMIT_ID
562 562
563 563 @property
564 564 def revisions(self):
565 565 warnings.warn("Use commits attribute instead", DeprecationWarning)
566 566 return self.commit_ids
567 567
568 568 @revisions.setter
569 569 def revisions(self, value):
570 570 warnings.warn("Use commits attribute instead", DeprecationWarning)
571 571 self.commit_ids = value
572 572
573 573 def get_changeset(self, revision=None, pre_load=None):
574 574 warnings.warn("Use get_commit instead", DeprecationWarning)
575 575 commit_id = None
576 576 commit_idx = None
577 577 if isinstance(revision, basestring):
578 578 commit_id = revision
579 579 else:
580 580 commit_idx = revision
581 581 return self.get_commit(
582 582 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
583 583
584 584 def get_changesets(
585 585 self, start=None, end=None, start_date=None, end_date=None,
586 586 branch_name=None, pre_load=None):
587 587 warnings.warn("Use get_commits instead", DeprecationWarning)
588 588 start_id = self._revision_to_commit(start)
589 589 end_id = self._revision_to_commit(end)
590 590 return self.get_commits(
591 591 start_id=start_id, end_id=end_id, start_date=start_date,
592 592 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
593 593
594 594 def _revision_to_commit(self, revision):
595 595 """
596 596 Translates a revision to a commit_id
597 597
598 598 Helps to support the old changeset-based API which allows using
599 599 commit ids and commit indices interchangeably.
600 600 """
601 601 if revision is None:
602 602 return revision
603 603
604 604 if isinstance(revision, basestring):
605 605 commit_id = revision
606 606 else:
607 607 commit_id = self.commit_ids[revision]
608 608 return commit_id
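    # Translation sketch for the deprecated changeset API (values illustrative):
    #
    #     repo._revision_to_commit(5)           # -> repo.commit_ids[5]
    #     repo._revision_to_commit('abc123')    # -> 'abc123' (passed through)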
609 609
610 610 @property
611 611 def in_memory_changeset(self):
612 612 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
613 613 return self.in_memory_commit
614 614
615 615
616 616 class BaseCommit(object):
617 617 """
618 618 Each backend should implement its commit representation.
619 619
620 620 **Attributes**
621 621
622 622 ``repository``
623 623 repository object within which commit exists
624 624
625 625 ``id``
626 626 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
627 627 just ``tip``.
628 628
629 629 ``raw_id``
630 630 raw commit representation (e.g. full 40-character sha for the git
631 631 backend)
632 632
633 633 ``short_id``
634 634 shortened (if applicable) version of ``raw_id``; it would be a simple
635 635 shortcut for ``raw_id[:12]`` for git/mercurial backends or the same
636 636 as ``raw_id`` for subversion
637 637
638 638 ``idx``
639 639 commit index
640 640
641 641 ``files``
642 642 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
643 643
644 644 ``dirs``
645 645 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
646 646
647 647 ``nodes``
648 648 combined list of ``Node`` objects
649 649
650 650 ``author``
651 651 author of the commit, as unicode
652 652
653 653 ``message``
654 654 message of the commit, as unicode
655 655
656 656 ``parents``
657 657 list of parent commits
658 658
659 659 """
660 660
661 661 branch = None
662 662 """
663 663 Depending on the backend this should be set to the branch name of the
664 664 commit. Backends not supporting branches on commits should leave this
665 665 value as ``None``.
666 666 """
667 667
668 668 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
669 669 """
670 670 This template is used to generate a default prefix for repository archives
671 671 if no prefix has been specified.
672 672 """
673 673
674 674 def __str__(self):
675 675 return '<%s at %s:%s>' % (
676 676 self.__class__.__name__, self.idx, self.short_id)
677 677
678 678 def __repr__(self):
679 679 return self.__str__()
680 680
681 681 def __unicode__(self):
682 682 return u'%s:%s' % (self.idx, self.short_id)
683 683
684 684 def __eq__(self, other):
685 685 same_instance = isinstance(other, self.__class__)
686 686 return same_instance and self.raw_id == other.raw_id
687 687
688 688 def __json__(self):
689 689 parents = []
690 690 try:
691 691 for parent in self.parents:
692 692 parents.append({'raw_id': parent.raw_id})
693 693 except NotImplementedError:
694 694 # empty commit doesn't have parents implemented
695 695 pass
696 696
697 697 return {
698 698 'short_id': self.short_id,
699 699 'raw_id': self.raw_id,
700 700 'revision': self.idx,
701 701 'message': self.message,
702 702 'date': self.date,
703 703 'author': self.author,
704 704 'parents': parents,
705 705 'branch': self.branch
706 706 }
707 707
708 708 @LazyProperty
709 709 def last(self):
710 710 """
711 711 ``True`` if this is the last commit in the repository, ``False``
712 712 otherwise; trying to access this attribute when there are no
713 713 commits would raise `EmptyRepositoryError`
714 714 """
715 715 if self.repository is None:
716 716 raise CommitError("Cannot check if it's most recent commit")
717 717 return self.raw_id == self.repository.commit_ids[-1]
718 718
719 719 @LazyProperty
720 720 def parents(self):
721 721 """
722 722 Returns list of parent commits.
723 723 """
724 724 raise NotImplementedError
725 725
726 726 @property
727 727 def merge(self):
728 728 """
729 729 Returns boolean if commit is a merge.
730 730 """
731 731 return len(self.parents) > 1
732 732
733 733 @LazyProperty
734 734 def children(self):
735 735 """
736 736 Returns list of child commits.
737 737 """
738 738 raise NotImplementedError
739 739
740 740 @LazyProperty
741 741 def id(self):
742 742 """
743 743 Returns string identifying this commit.
744 744 """
745 745 raise NotImplementedError
746 746
747 747 @LazyProperty
748 748 def raw_id(self):
749 749 """
750 750 Returns raw string identifying this commit.
751 751 """
752 752 raise NotImplementedError
753 753
754 754 @LazyProperty
755 755 def short_id(self):
756 756 """
757 757 Returns shortened version of ``raw_id`` attribute, as string,
758 758 identifying this commit, useful for presentation to users.
759 759 """
760 760 raise NotImplementedError
761 761
762 762 @LazyProperty
763 763 def idx(self):
764 764 """
765 765 Returns integer identifying this commit.
766 766 """
767 767 raise NotImplementedError
768 768
769 769 @LazyProperty
770 770 def committer(self):
771 771 """
772 772 Returns committer for this commit
773 773 """
774 774 raise NotImplementedError
775 775
776 776 @LazyProperty
777 777 def committer_name(self):
778 778 """
779 779 Returns committer name for this commit
780 780 """
781 781
782 782 return author_name(self.committer)
783 783
784 784 @LazyProperty
785 785 def committer_email(self):
786 786 """
787 787 Returns committer email address for this commit
788 788 """
789 789
790 790 return author_email(self.committer)
791 791
792 792 @LazyProperty
793 793 def author(self):
794 794 """
795 795 Returns author for this commit
796 796 """
797 797
798 798 raise NotImplementedError
799 799
800 800 @LazyProperty
801 801 def author_name(self):
802 802 """
803 803 Returns author name for this commit
804 804 """
805 805
806 806 return author_name(self.author)
807 807
808 808 @LazyProperty
809 809 def author_email(self):
810 810 """
811 811 Returns author email address for this commit
812 812 """
813 813
814 814 return author_email(self.author)
815 815
816 816 def get_file_mode(self, path):
817 817 """
818 818 Returns stat mode of the file at `path`.
819 819 """
820 820 raise NotImplementedError
821 821
822 822 def is_link(self, path):
823 823 """
824 824 Returns ``True`` if given `path` is a symlink
825 825 """
826 826 raise NotImplementedError
827 827
828 828 def get_file_content(self, path):
829 829 """
830 830 Returns content of the file at the given `path`.
831 831 """
832 832 raise NotImplementedError
833 833
834 834 def get_file_size(self, path):
835 835 """
836 836 Returns size of the file at the given `path`.
837 837 """
838 838 raise NotImplementedError
839 839
840 840 def get_file_commit(self, path, pre_load=None):
841 841 """
842 842 Returns last commit of the file at the given `path`.
843 843
844 844 :param pre_load: Optional. List of commit attributes to load.
845 845 """
846 846 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
847 847 if not commits:
848 848 raise RepositoryError(
849 849 'Failed to fetch history for path {}. '
850 850 'Please check if such path exists in your repository'.format(
851 851 path))
852 852 return commits[0]
853 853
854 854 def get_file_history(self, path, limit=None, pre_load=None):
855 855 """
856 856 Returns history of file as reversed list of :class:`BaseCommit`
857 857 objects for which file at given `path` has been modified.
858 858
859 859 :param limit: Optional. Allows to limit the size of the returned
860 860 history. This is intended as a hint to the underlying backend, so
861 861 that it can apply optimizations depending on the limit.
862 862 :param pre_load: Optional. List of commit attributes to load.
863 863 """
864 864 raise NotImplementedError
865 865
866 866 def get_file_annotate(self, path, pre_load=None):
867 867 """
868 868 Returns a generator of four-element tuples with
869 869 lineno, sha, commit lazy loader and line
870 870
871 871 :param pre_load: Optional. List of commit attributes to load.
872 872 """
873 873 raise NotImplementedError
874 874
875 875 def get_nodes(self, path):
876 876 """
877 877 Returns combined ``DirNode`` and ``FileNode`` objects list representing
878 878 state of commit at the given ``path``.
879 879
880 880 :raises ``CommitError``: if node at the given ``path`` is not
881 881 instance of ``DirNode``
882 882 """
883 883 raise NotImplementedError
884 884
885 885 def get_node(self, path):
886 886 """
887 887 Returns ``Node`` object from the given ``path``.
888 888
889 889 :raises ``NodeDoesNotExistError``: if there is no node at the given
890 890 ``path``
891 891 """
892 892 raise NotImplementedError
893 893
894 894 def get_largefile_node(self, path):
895 895 """
896 896 Returns the path to a largefile from Mercurial/Git-lfs storage,
897 897 or None if it's not a largefile node.
898 898 """
899 899 return None
900 900
901 901 def archive_repo(self, file_path, kind='tgz', subrepos=None,
902 902 prefix=None, write_metadata=False, mtime=None):
903 903 """
904 904 Creates an archive containing the contents of the repository.
905 905
906 906 :param file_path: path to the file in which to create the archive.
907 907 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
908 908 :param prefix: name of root directory in archive.
909 909 Default is repository name and commit's short_id joined with dash:
910 910 ``"{repo_name}-{short_id}"``.
911 911 :param write_metadata: write a metadata file into archive.
912 912 :param mtime: custom modification time for archive creation, defaults
913 913 to time.time() if not given.
914 914
915 915 :raise VCSError: If prefix has a problem.
916 916 """
917 917 allowed_kinds = settings.ARCHIVE_SPECS.keys()
918 918 if kind not in allowed_kinds:
919 919 raise ImproperArchiveTypeError(
920 920 'Archive kind (%s) not supported, use one of %s' %
921 921 (kind, allowed_kinds))
922 922
923 923 prefix = self._validate_archive_prefix(prefix)
924 924
925 925 mtime = mtime or time.mktime(self.date.timetuple())
926 926
927 927 file_info = []
928 928 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
929 929 for _r, _d, files in cur_rev.walk('/'):
930 930 for f in files:
931 931 f_path = os.path.join(prefix, f.path)
932 932 file_info.append(
933 933 (f_path, f.mode, f.is_link(), f.raw_bytes))
934 934
935 935 if write_metadata:
936 936 metadata = [
937 937 ('repo_name', self.repository.name),
938 938 ('rev', self.raw_id),
939 939 ('create_time', mtime),
940 940 ('branch', self.branch),
941 941 ('tags', ','.join(self.tags)),
942 942 ]
943 943 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
944 944 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
945 945
946 946 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
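    # Usage sketch (``commit`` is any concrete BaseCommit; target path and
    # prefix are illustrative):
    #
    #     commit.archive_repo(
    #         '/tmp/myrepo-snapshot.tgz', kind='tgz',
    #         prefix='myrepo-snapshot', write_metadata=True)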
947 947
948 948 def _validate_archive_prefix(self, prefix):
949 949 if prefix is None:
950 950 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
951 951 repo_name=safe_str(self.repository.name),
952 952 short_id=self.short_id)
953 953 elif not isinstance(prefix, str):
954 954 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
955 955 elif prefix.startswith('/'):
956 956 raise VCSError("Prefix cannot start with leading slash")
957 957 elif prefix.strip() == '':
958 958 raise VCSError("Prefix cannot be empty")
959 959 return prefix
960 960
961 961 @LazyProperty
962 962 def root(self):
963 963 """
964 964 Returns ``RootNode`` object for this commit.
965 965 """
966 966 return self.get_node('')
967 967
968 968 def next(self, branch=None):
969 969 """
970 970 Returns the next commit from the current one; if branch is given it will
971 971 return the next commit belonging to that branch
972 972
973 973 :param branch: show commits within the given named branch
974 974 """
975 975 indexes = xrange(self.idx + 1, self.repository.count())
976 976 return self._find_next(indexes, branch)
977 977
978 978 def prev(self, branch=None):
979 979 """
980 980 Returns the previous commit from the current one; if branch is given it
981 981 will return the previous commit belonging to that branch
982 982
983 983 :param branch: show commit within the given named branch
984 984 """
985 985 indexes = xrange(self.idx - 1, -1, -1)
986 986 return self._find_next(indexes, branch)
987 987
988 988 def _find_next(self, indexes, branch=None):
989 989 if branch and self.branch != branch:
990 990 raise VCSError('Branch option used on commit not belonging '
991 991 'to that branch')
992 992
993 993 for next_idx in indexes:
994 994 commit = self.repository.get_commit(commit_idx=next_idx)
995 995 if branch and branch != commit.branch:
996 996 continue
997 997 return commit
998 998 raise CommitDoesNotExistError
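    # Navigation sketch built on ``next``/``prev``; the branch name is
    # illustrative and ``CommitDoesNotExistError`` marks the end of the walk:
    #
    #     try:
    #         newer = commit.next(branch='default')
    #     except CommitDoesNotExistError:
    #         newer = None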
999 999
1000 1000 def diff(self, ignore_whitespace=True, context=3):
1001 1001 """
1002 1002 Returns a `Diff` object representing the change made by this commit.
1003 1003 """
1004 1004 parent = (
1005 1005 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1006 1006 diff = self.repository.get_diff(
1007 1007 parent, self,
1008 1008 ignore_whitespace=ignore_whitespace,
1009 1009 context=context)
1010 1010 return diff
1011 1011
1012 1012 @LazyProperty
1013 1013 def added(self):
1014 1014 """
1015 1015 Returns list of added ``FileNode`` objects.
1016 1016 """
1017 1017 raise NotImplementedError
1018 1018
1019 1019 @LazyProperty
1020 1020 def changed(self):
1021 1021 """
1022 1022 Returns list of modified ``FileNode`` objects.
1023 1023 """
1024 1024 raise NotImplementedError
1025 1025
1026 1026 @LazyProperty
1027 1027 def removed(self):
1028 1028 """
1029 1029 Returns list of removed ``FileNode`` objects.
1030 1030 """
1031 1031 raise NotImplementedError
1032 1032
1033 1033 @LazyProperty
1034 1034 def size(self):
1035 1035 """
1036 1036 Returns total number of bytes from contents of all filenodes.
1037 1037 """
1038 1038 return sum((node.size for node in self.get_filenodes_generator()))
1039 1039
1040 1040 def walk(self, topurl=''):
1041 1041 """
1042 1042 Similar to the os.walk method. Instead of the filesystem it walks
1043 1043 through the commit starting at given ``topurl``. Returns generator of tuples
1044 1044 (topnode, dirnodes, filenodes).
1045 1045 """
1046 1046 topnode = self.get_node(topurl)
1047 1047 if not topnode.is_dir():
1048 1048 return
1049 1049 yield (topnode, topnode.dirs, topnode.files)
1050 1050 for dirnode in topnode.dirs:
1051 1051 for tup in self.walk(dirnode.path):
1052 1052 yield tup
1053 1053
1054 1054 def get_filenodes_generator(self):
1055 1055 """
1056 1056 Returns generator that yields *all* file nodes.
1057 1057 """
1058 1058 for topnode, dirs, files in self.walk():
1059 1059 for node in files:
1060 1060 yield node
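    # Tree traversal sketch; ``'docs'`` is an illustrative sub-path:
    #
    #     all_paths = [node.path for node in commit.get_filenodes_generator()]
    #     for topnode, dirs, files in commit.walk('docs'):
    #         pass  # process one directory level at a time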
1061 1061
1062 1062 #
1063 1063 # Utilities for sub classes to support consistent behavior
1064 1064 #
1065 1065
1066 1066 def no_node_at_path(self, path):
1067 1067 return NodeDoesNotExistError(
1068 1068 "There is no file nor directory at the given path: "
1069 1069 "'%s' at commit %s" % (path, self.short_id))
1070 1070
1071 1071 def _fix_path(self, path):
1072 1072 """
1073 1073 Paths are stored without trailing slash so we need to get rid of it if
1074 1074 needed.
1075 1075 """
1076 1076 return path.rstrip('/')
1077 1077
1078 1078 #
1079 1079 # Deprecated API based on changesets
1080 1080 #
1081 1081
1082 1082 @property
1083 1083 def revision(self):
1084 1084 warnings.warn("Use idx instead", DeprecationWarning)
1085 1085 return self.idx
1086 1086
1087 1087 @revision.setter
1088 1088 def revision(self, value):
1089 1089 warnings.warn("Use idx instead", DeprecationWarning)
1090 1090 self.idx = value
1091 1091
1092 1092 def get_file_changeset(self, path):
1093 1093 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1094 1094 return self.get_file_commit(path)
1095 1095
1096 1096
1097 1097 class BaseChangesetClass(type):
1098 1098
1099 1099 def __instancecheck__(self, instance):
1100 1100 return isinstance(instance, BaseCommit)
1101 1101
1102 1102
1103 1103 class BaseChangeset(BaseCommit):
1104 1104
1105 1105 __metaclass__ = BaseChangesetClass
1106 1106
1107 1107 def __new__(cls, *args, **kwargs):
1108 1108 warnings.warn(
1109 1109 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1110 1110 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1111 1111
1112 1112
1113 1113 class BaseInMemoryCommit(object):
1114 1114 """
1115 1115 Represents differences between repository's state (most recent head) and
1116 1116 changes made *in place*.
1117 1117
1118 1118 **Attributes**
1119 1119
1120 1120 ``repository``
1121 1121 repository object for this in-memory-commit
1122 1122
1123 1123 ``added``
1124 1124 list of ``FileNode`` objects marked as *added*
1125 1125
1126 1126 ``changed``
1127 1127 list of ``FileNode`` objects marked as *changed*
1128 1128
1129 1129 ``removed``
1130 1130 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1131 1131 *removed*
1132 1132
1133 1133 ``parents``
1134 1134 list of :class:`BaseCommit` instances representing parents of
1135 1135 in-memory commit. Should always be 2-element sequence.
1136 1136
1137 1137 """
1138 1138
1139 1139 def __init__(self, repository):
1140 1140 self.repository = repository
1141 1141 self.added = []
1142 1142 self.changed = []
1143 1143 self.removed = []
1144 1144 self.parents = []
1145 1145
1146 1146 def add(self, *filenodes):
1147 1147 """
1148 1148 Marks given ``FileNode`` objects as *to be committed*.
1149 1149
1150 1150 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1151 1151 latest commit
1152 1152 :raises ``NodeAlreadyAddedError``: if node with same path is already
1153 1153 marked as *added*
1154 1154 """
1155 1155 # Check if not already marked as *added* first
1156 1156 for node in filenodes:
1157 1157 if node.path in (n.path for n in self.added):
1158 1158 raise NodeAlreadyAddedError(
1159 1159 "Such FileNode %s is already marked for addition"
1160 1160 % node.path)
1161 1161 for node in filenodes:
1162 1162 self.added.append(node)
1163 1163
1164 1164 def change(self, *filenodes):
1165 1165 """
1166 1166 Marks given ``FileNode`` objects to be *changed* in next commit.
1167 1167
1168 1168 :raises ``EmptyRepositoryError``: if there are no commits yet
1169 1169 :raises ``NodeAlreadyChangedError``: if node with same path is already
1170 1170 marked to be *changed*
1171 1171 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1172 1172 marked to be *removed*
1173 1173 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1174 1174 commit
1175 1175 :raises ``NodeNotChangedError``: if node hasn't really been changed
1176 1176 """
1177 1177 for node in filenodes:
1178 1178 if node.path in (n.path for n in self.removed):
1179 1179 raise NodeAlreadyRemovedError(
1180 1180 "Node at %s is already marked as removed" % node.path)
1181 1181 try:
1182 1182 self.repository.get_commit()
1183 1183 except EmptyRepositoryError:
1184 1184 raise EmptyRepositoryError(
1185 1185 "Nothing to change - try to *add* new nodes rather than "
1186 1186 "changing them")
1187 1187 for node in filenodes:
1188 1188 if node.path in (n.path for n in self.changed):
1189 1189 raise NodeAlreadyChangedError(
1190 1190 "Node at '%s' is already marked as changed" % node.path)
1191 1191 self.changed.append(node)
1192 1192
1193 1193 def remove(self, *filenodes):
1194 1194 """
1195 1195 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1196 1196 *removed* in next commit.
1197 1197
1198 1198 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1199 1199 be *removed*
1200 1200 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1201 1201 be *changed*
1202 1202 """
1203 1203 for node in filenodes:
1204 1204 if node.path in (n.path for n in self.removed):
1205 1205 raise NodeAlreadyRemovedError(
1206 1206 "Node is already marked to for removal at %s" % node.path)
1207 1207 if node.path in (n.path for n in self.changed):
1208 1208 raise NodeAlreadyChangedError(
1209 1209 "Node is already marked to be changed at %s" % node.path)
1210 1210 # We only mark node as *removed* - real removal is done by
1211 1211 # commit method
1212 1212 self.removed.append(node)
1213 1213
1214 1214 def reset(self):
1215 1215 """
1216 1216 Resets this instance to initial state (cleans ``added``, ``changed``
1217 1217 and ``removed`` lists).
1218 1218 """
1219 1219 self.added = []
1220 1220 self.changed = []
1221 1221 self.removed = []
1222 1222 self.parents = []
1223 1223
1224 1224 def get_ipaths(self):
1225 1225 """
1226 1226 Returns generator of paths from nodes marked as added, changed or
1227 1227 removed.
1228 1228 """
1229 1229 for node in itertools.chain(self.added, self.changed, self.removed):
1230 1230 yield node.path
1231 1231
1232 1232 def get_paths(self):
1233 1233 """
1234 1234 Returns list of paths from nodes marked as added, changed or removed.
1235 1235 """
1236 1236 return list(self.get_ipaths())
1237 1237
1238 1238 def check_integrity(self, parents=None):
1239 1239 """
1240 1240 Checks in-memory commit's integrity. Also, sets parents if not
1241 1241 already set.
1242 1242
1243 1243 :raises CommitError: if any error occurs (e.g.
1244 1244 ``NodeDoesNotExistError``).
1245 1245 """
1246 1246 if not self.parents:
1247 1247 parents = parents or []
1248 1248 if len(parents) == 0:
1249 1249 try:
1250 1250 parents = [self.repository.get_commit(), None]
1251 1251 except EmptyRepositoryError:
1252 1252 parents = [None, None]
1253 1253 elif len(parents) == 1:
1254 1254 parents += [None]
1255 1255 self.parents = parents
1256 1256
1257 1257 # Local parents, only if not None
1258 1258 parents = [p for p in self.parents if p]
1259 1259
1260 1260 # Check nodes marked as added
1261 1261 for p in parents:
1262 1262 for node in self.added:
1263 1263 try:
1264 1264 p.get_node(node.path)
1265 1265 except NodeDoesNotExistError:
1266 1266 pass
1267 1267 else:
1268 1268 raise NodeAlreadyExistsError(
1269 1269 "Node `%s` already exists at %s" % (node.path, p))
1270 1270
1271 1271 # Check nodes marked as changed
1272 1272 missing = set(self.changed)
1273 1273 not_changed = set(self.changed)
1274 1274 if self.changed and not parents:
1275 1275 raise NodeDoesNotExistError(str(self.changed[0].path))
1276 1276 for p in parents:
1277 1277 for node in self.changed:
1278 1278 try:
1279 1279 old = p.get_node(node.path)
1280 1280 missing.remove(node)
1281 1281 # if content actually changed, remove node from not_changed
1282 1282 if old.content != node.content:
1283 1283 not_changed.remove(node)
1284 1284 except NodeDoesNotExistError:
1285 1285 pass
1286 1286 if self.changed and missing:
1287 1287 raise NodeDoesNotExistError(
1288 1288 "Node `%s` marked as modified but missing in parents: %s"
1289 1289 % (node.path, parents))
1290 1290
1291 1291 if self.changed and not_changed:
1292 1292 raise NodeNotChangedError(
1293 1293 "Node `%s` wasn't actually changed (parents: %s)"
1294 1294 % (not_changed.pop().path, parents))
1295 1295
1296 1296 # Check nodes marked as removed
1297 1297 if self.removed and not parents:
1298 1298 raise NodeDoesNotExistError(
1299 1299 "Cannot remove node at %s as there "
1300 1300 "were no parents specified" % self.removed[0].path)
1301 1301 really_removed = set()
1302 1302 for p in parents:
1303 1303 for node in self.removed:
1304 1304 try:
1305 1305 p.get_node(node.path)
1306 1306 really_removed.add(node)
1307 1307 except CommitError:
1308 1308 pass
1309 1309 not_removed = set(self.removed) - really_removed
1310 1310 if not_removed:
1311 1311 # TODO: johbo: This code branch does not seem to be covered
1312 1312 raise NodeDoesNotExistError(
1313 1313 "Cannot remove node at %s from "
1314 1314 "following parents: %s" % (not_removed, parents))
1315 1315
1316 1316 def commit(
1317 1317 self, message, author, parents=None, branch=None, date=None,
1318 1318 **kwargs):
1319 1319 """
1320 1320 Performs in-memory commit (doesn't check workdir in any way) and
1321 1321 returns newly created :class:`BaseCommit`. Updates repository's
1322 1322 attribute `commits`.
1323 1323
1324 1324 .. note::
1325 1325
1326 1326 While overriding this method, each backend should call
1327 1327 ``self.check_integrity(parents)`` first.
1328 1328
1329 1329 :param message: message of the commit
1330 1330 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1331 1331 :param parents: single parent or sequence of parents from which commit
1332 1332 would be derived
1333 1333 :param date: ``datetime.datetime`` instance. Defaults to
1334 1334 ``datetime.datetime.now()``.
1335 1335 :param branch: branch name, as string. If none given, the backend's default
1336 1336 branch would be used.
1337 1337
1338 1338 :raises ``CommitError``: if any error occurs while committing
1339 1339 """
1340 1340 raise NotImplementedError
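    # End-to-end sketch of the in-memory commit workflow; ``FileNode`` comes
    # from rhodecode.lib.vcs.nodes and the path/content/author are illustrative:
    #
    #     imc = repo.in_memory_commit
    #     imc.add(FileNode('docs/readme.rst', content='hello'))
    #     new_commit = imc.commit(
    #         message=u'Add readme',
    #         author=u'Joe Doe <joe.doe@example.com>')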
1341 1341
1342 1342
1343 1343 class BaseInMemoryChangesetClass(type):
1344 1344
1345 1345 def __instancecheck__(self, instance):
1346 1346 return isinstance(instance, BaseInMemoryCommit)
1347 1347
1348 1348
1349 1349 class BaseInMemoryChangeset(BaseInMemoryCommit):
1350 1350
1351 1351 __metaclass__ = BaseInMemoryChangesetClass
1352 1352
1353 1353 def __new__(cls, *args, **kwargs):
1354 1354 warnings.warn(
1355 1355 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1356 1356 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1357 1357
1358 1358
1359 1359 class EmptyCommit(BaseCommit):
1360 1360 """
1361 1361 A dummy empty commit. It's possible to pass a hash when creating
1362 1362 an EmptyCommit
1363 1363 """
1364 1364
1365 1365 def __init__(
1366 1366 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1367 1367 message='', author='', date=None):
1368 1368 self._empty_commit_id = commit_id
1369 1369 # TODO: johbo: Solve idx parameter, default value does not make
1370 1370 # too much sense
1371 1371 self.idx = idx
1372 1372 self.message = message
1373 1373 self.author = author
1374 1374 self.date = date or datetime.datetime.fromtimestamp(0)
1375 1375 self.repository = repo
1376 1376 self.alias = alias
1377 1377
1378 1378 @LazyProperty
1379 1379 def raw_id(self):
1380 1380 """
1381 1381 Returns raw string identifying this commit, useful for web
1382 1382 representation.
1383 1383 """
1384 1384
1385 1385 return self._empty_commit_id
1386 1386
1387 1387 @LazyProperty
1388 1388 def branch(self):
1389 1389 if self.alias:
1390 1390 from rhodecode.lib.vcs.backends import get_backend
1391 1391 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1392 1392
1393 1393 @LazyProperty
1394 1394 def short_id(self):
1395 1395 return self.raw_id[:12]
1396 1396
1397 1397 @LazyProperty
1398 1398 def id(self):
1399 1399 return self.raw_id
1400 1400
1401 1401 def get_file_commit(self, path):
1402 1402 return self
1403 1403
1404 1404 def get_file_content(self, path):
1405 1405 return u''
1406 1406
1407 1407 def get_file_size(self, path):
1408 1408 return 0
1409 1409
1410 1410
1411 1411 class EmptyChangesetClass(type):
1412 1412
1413 1413 def __instancecheck__(self, instance):
1414 1414 return isinstance(instance, EmptyCommit)
1415 1415
1416 1416
1417 1417 class EmptyChangeset(EmptyCommit):
1418 1418
1419 1419 __metaclass__ = EmptyChangesetClass
1420 1420
1421 1421 def __new__(cls, *args, **kwargs):
1422 1422 warnings.warn(
1423 1423 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1424 1424 return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
1425 1425
1426 1426 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1427 1427 alias=None, revision=-1, message='', author='', date=None):
1428 1428 if requested_revision is not None:
1429 1429 warnings.warn(
1430 1430 "Parameter requested_revision not supported anymore",
1431 1431 DeprecationWarning)
1432 1432 super(EmptyChangeset, self).__init__(
1433 1433 commit_id=cs, repo=repo, alias=alias, idx=revision,
1434 1434 message=message, author=author, date=date)
1435 1435
1436 1436 @property
1437 1437 def revision(self):
1438 1438 warnings.warn("Use idx instead", DeprecationWarning)
1439 1439 return self.idx
1440 1440
1441 1441 @revision.setter
1442 1442 def revision(self, value):
1443 1443 warnings.warn("Use idx instead", DeprecationWarning)
1444 1444 self.idx = value
1445 1445
1446 1446
1447 1447 class EmptyRepository(BaseRepository):
1448 1448 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1449 1449 pass
1450 1450
1451 1451 def get_diff(self, *args, **kwargs):
1452 1452 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1453 1453 return GitDiff('')
1454 1454
1455 1455
1456 1456 class CollectionGenerator(object):
1457 1457
1458 1458 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1459 1459 self.repo = repo
1460 1460 self.commit_ids = commit_ids
1461 1461 # TODO: (oliver) this isn't currently hooked up
1462 1462 self.collection_size = None
1463 1463 self.pre_load = pre_load
1464 1464
1465 1465 def __len__(self):
1466 1466 if self.collection_size is not None:
1467 1467 return self.collection_size
1468 1468 return self.commit_ids.__len__()
1469 1469
1470 1470 def __iter__(self):
1471 1471 for commit_id in self.commit_ids:
1472 1472 # TODO: johbo: Mercurial passes in commit indices or commit ids
1473 1473 yield self._commit_factory(commit_id)
1474 1474
1475 1475 def _commit_factory(self, commit_id):
1476 1476 """
1477 1477 Allows backends to override the way commits are generated.
1478 1478 """
1479 1479 return self.repo.get_commit(commit_id=commit_id,
1480 1480 pre_load=self.pre_load)
1481 1481
1482 1482 def __getslice__(self, i, j):
1483 1483 """
1484 1484 Returns a new CollectionGenerator over the sliced commit ids
1485 1485 """
1486 1486 commit_ids = self.commit_ids[i:j]
1487 1487 return self.__class__(
1488 1488 self.repo, commit_ids, pre_load=self.pre_load)
1489 1489
1490 1490 def __repr__(self):
1491 1491 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1492 1492
1493 1493
1494 1494 class Config(object):
1495 1495 """
1496 1496 Represents the configuration for a repository.
1497 1497
1498 1498 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1499 1499 standard library. It implements only the needed subset.
1500 1500 """
1501 1501
1502 1502 def __init__(self):
1503 1503 self._values = {}
1504 1504
1505 1505 def copy(self):
1506 1506 clone = Config()
1507 1507 for section, values in self._values.items():
1508 1508 clone._values[section] = values.copy()
1509 1509 return clone
1510 1510
1511 1511 def __repr__(self):
1512 1512 return '<Config(%s sections) at %s>' % (
1513 1513 len(self._values), hex(id(self)))
1514 1514
1515 1515 def items(self, section):
1516 1516 return self._values.get(section, {}).iteritems()
1517 1517
1518 1518 def get(self, section, option):
1519 1519 return self._values.get(section, {}).get(option)
1520 1520
1521 1521 def set(self, section, option, value):
1522 1522 section_values = self._values.setdefault(section, {})
1523 1523 section_values[option] = value
1524 1524
1525 1525 def clear_section(self, section):
1526 1526 self._values[section] = {}
1527 1527
1528 1528 def serialize(self):
1529 1529 """
1530 1530 Creates a list of (section, key, value) three-tuples representing
1531 1531 this config object.
1532 1532 """
1533 1533 items = []
1534 1534 for section in self._values:
1535 1535 for option, value in self._values[section].items():
1536 1536 items.append(
1537 1537 (safe_str(section), safe_str(option), safe_str(value)))
1538 1538 return items
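    # Usage sketch of the Config API above (section/option values illustrative):
    #
    #     config = Config()
    #     config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
    #     username = config.get('ui', 'username')
    #     items = config.serialize()   # [('ui', 'username', 'Joe Doe ...')]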
1539 1539
1540 1540
1541 1541 class Diff(object):
1542 1542 """
1543 1543 Represents a diff result from a repository backend.
1544 1544
1545 1545 Subclasses have to provide a backend specific value for
1546 1546 :attr:`_header_re` and :attr:`_meta_re`.
1547 1547 """
1548 1548 _meta_re = None
1549 1549 _header_re = None
1550 1550
1551 1551 def __init__(self, raw_diff):
1552 1552 self.raw = raw_diff
1553 1553
1554 1554 def chunks(self):
1555 1555 """
1556 1556 Splits the diff into chunks of separate ``diff --git a/file b/file`` parts.
1557 1557 To make diffs consistent we must prepend them with \n, and make sure
1558 1558 we can detect the last chunk, as it also has a special rule.
1559 1559 """
1560 1560
1561 1561 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1562 1562 header = diff_parts[0]
1563 1563
1564 1564 if self._meta_re:
1565 1565 match = self._meta_re.match(header)
1566 1566
1567 1567 chunks = diff_parts[1:]
1568 1568 total_chunks = len(chunks)
1569 1569
1570 1570 return (
1571 1571 DiffChunk(chunk, self, cur_chunk == total_chunks)
1572 1572 for cur_chunk, chunk in enumerate(chunks, start=1))
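    # Usage sketch with the concrete GitDiff subclass, which supplies the
    # backend specific ``_header_re``; ``raw_diff_text`` is a placeholder:
    #
    #     from rhodecode.lib.vcs.backends.git.diff import GitDiff
    #     for chunk in GitDiff(raw_diff_text).chunks():
    #         header, body = chunk.header, chunk.diff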
1573 1573
1574 1574
1575 1575 class DiffChunk(object):
1576 1576
1577 1577 def __init__(self, chunk, diff, last_chunk):
1578 1578 self._diff = diff
1579 1579
1580 1580 # since we split by \ndiff --git that part is lost from original diff
1581 1581 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1582 1582 if not last_chunk:
1583 1583 chunk += '\n'
1584 1584
1585 1585 match = self._diff._header_re.match(chunk)
1586 1586 self.header = match.groupdict()
1587 1587 self.diff = chunk[match.end():]
1588 1588 self.raw = chunk
@@ -1,1469 +1,1469 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from sqlalchemy import or_
35 35
36 36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 39 from rhodecode.lib.markup_renderer import (
40 40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 41 from rhodecode.lib.utils import action_logger
42 42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 46 from rhodecode.lib.vcs.exceptions import (
47 47 CommitDoesNotExistError, EmptyRepositoryError)
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import CommentsModel
51 51 from rhodecode.model.db import (
52 52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 53 PullRequestVersion, ChangesetComment, Repository)
54 54 from rhodecode.model.meta import Session
55 55 from rhodecode.model.notification import NotificationModel, \
56 56 EmailNotificationModel
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.settings import VcsSettingsModel
59 59
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 # Data structure to hold the response data when updating commits during a pull
65 65 # request update.
66 66 UpdateResponse = namedtuple('UpdateResponse', [
67 67 'executed', 'reason', 'new', 'old', 'changes',
68 68 'source_changed', 'target_changed'])
69 69
70 70
71 71 class PullRequestModel(BaseModel):
72 72
73 73 cls = PullRequest
74 74
75 75 DIFF_CONTEXT = 3
76 76
77 77 MERGE_STATUS_MESSAGES = {
78 78 MergeFailureReason.NONE: lazy_ugettext(
79 79 'This pull request can be automatically merged.'),
80 80 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 81 'This pull request cannot be merged because of an unhandled'
82 82 ' exception.'),
83 83 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 84 'This pull request cannot be merged because of merge conflicts.'),
85 85 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 86 'This pull request could not be merged because push to target'
87 87 ' failed.'),
88 88 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 89 'This pull request cannot be merged because the target is not a'
90 90 ' head.'),
91 91 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 92 'This pull request cannot be merged because the source contains'
93 93 ' more branches than the target.'),
94 94 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 95 'This pull request cannot be merged because the target has'
96 96 ' multiple heads.'),
97 97 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 98 'This pull request cannot be merged because the target repository'
99 99 ' is locked.'),
100 100 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 101 'This pull request cannot be merged because the target or the '
102 102 'source reference is missing.'),
103 103 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 104 'This pull request cannot be merged because the target '
105 105 'reference is missing.'),
106 106 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 107 'This pull request cannot be merged because the source '
108 108 'reference is missing.'),
109 109 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 110 'This pull request cannot be merged because of conflicts related '
111 111 'to sub repositories.'),
112 112 }
113 113
114 114 UPDATE_STATUS_MESSAGES = {
115 115 UpdateFailureReason.NONE: lazy_ugettext(
116 116 'Pull request update successful.'),
117 117 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 118 'Pull request update failed because of an unknown error.'),
119 119 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 120 'No update needed because the source and target have not changed.'),
121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
121 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 122 'Pull request cannot be updated because the reference type is '
123 'not supported for an update.'),
123 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 125 'This pull request cannot be updated because the target '
126 126 'reference is missing.'),
127 127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 128 'This pull request cannot be updated because the source '
129 129 'reference is missing.'),
130 130 }
131 131
132 132 def __get_pull_request(self, pull_request):
133 133 return self._get_instance((
134 134 PullRequest, PullRequestVersion), pull_request)
135 135
136 136 def _check_perms(self, perms, pull_request, user, api=False):
137 137 if not api:
138 138 return h.HasRepoPermissionAny(*perms)(
139 139 user=user, repo_name=pull_request.target_repo.repo_name)
140 140 else:
141 141 return h.HasRepoPermissionAnyApi(*perms)(
142 142 user=user, repo_name=pull_request.target_repo.repo_name)
143 143
144 144 def check_user_read(self, pull_request, user, api=False):
145 145 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 146 return self._check_perms(_perms, pull_request, user, api)
147 147
148 148 def check_user_merge(self, pull_request, user, api=False):
149 149 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 150 return self._check_perms(_perms, pull_request, user, api)
151 151
152 152 def check_user_update(self, pull_request, user, api=False):
153 153 owner = user.user_id == pull_request.user_id
154 154 return self.check_user_merge(pull_request, user, api) or owner
155 155
156 156 def check_user_delete(self, pull_request, user):
157 157 owner = user.user_id == pull_request.user_id
158 158 _perms = ('repository.admin',)
159 159 return self._check_perms(_perms, pull_request, user) or owner
160 160
161 161 def check_user_change_status(self, pull_request, user, api=False):
162 162 reviewer = user.user_id in [x.user_id for x in
163 163 pull_request.reviewers]
164 164 return self.check_user_update(pull_request, user, api) or reviewer
165 165
166 166 def get(self, pull_request):
167 167 return self.__get_pull_request(pull_request)
168 168
169 169 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
170 170 opened_by=None, order_by=None,
171 171 order_dir='desc'):
172 172 repo = None
173 173 if repo_name:
174 174 repo = self._get_repo(repo_name)
175 175
176 176 q = PullRequest.query()
177 177
178 178 # source or target
179 179 if repo and source:
180 180 q = q.filter(PullRequest.source_repo == repo)
181 181 elif repo:
182 182 q = q.filter(PullRequest.target_repo == repo)
183 183
184 184 # closed,opened
185 185 if statuses:
186 186 q = q.filter(PullRequest.status.in_(statuses))
187 187
188 188 # opened by filter
189 189 if opened_by:
190 190 q = q.filter(PullRequest.user_id.in_(opened_by))
191 191
192 192 if order_by:
193 193 order_map = {
194 194 'name_raw': PullRequest.pull_request_id,
195 195 'title': PullRequest.title,
196 196 'updated_on_raw': PullRequest.updated_on,
197 197 'target_repo': PullRequest.target_repo_id
198 198 }
199 199 if order_dir == 'asc':
200 200 q = q.order_by(order_map[order_by].asc())
201 201 else:
202 202 q = q.order_by(order_map[order_by].desc())
203 203
204 204 return q
205 205
206 206 def count_all(self, repo_name, source=False, statuses=None,
207 207 opened_by=None):
208 208 """
209 209 Count the number of pull requests for a specific repository.
210 210
211 211 :param repo_name: target or source repo
212 212 :param source: boolean flag to specify if repo_name refers to source
213 213 :param statuses: list of pull request statuses
214 214 :param opened_by: author user of the pull request
215 215 :returns: int number of pull requests
216 216 """
217 217 q = self._prepare_get_all_query(
218 218 repo_name, source=source, statuses=statuses, opened_by=opened_by)
219 219
220 220 return q.count()
221 221
222 222 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
223 223 offset=0, length=None, order_by=None, order_dir='desc'):
224 224 """
225 225 Get all pull requests for a specific repository.
226 226
227 227 :param repo_name: target or source repo
228 228 :param source: boolean flag to specify if repo_name refers to source
229 229 :param statuses: list of pull request statuses
230 230 :param opened_by: author user of the pull request
231 231 :param offset: pagination offset
232 232 :param length: length of returned list
233 233 :param order_by: order of the returned list
234 234 :param order_dir: 'asc' or 'desc' ordering direction
235 235 :returns: list of pull requests
236 236 """
237 237 q = self._prepare_get_all_query(
238 238 repo_name, source=source, statuses=statuses, opened_by=opened_by,
239 239 order_by=order_by, order_dir=order_dir)
240 240
241 241 if length:
242 242 pull_requests = q.limit(length).offset(offset).all()
243 243 else:
244 244 pull_requests = q.all()
245 245
246 246 return pull_requests
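    # Query sketch: newest open pull requests targeting a repository; the repo
    # name and page size are illustrative, and the status constant is assumed
    # to be defined on the PullRequest model:
    #
    #     prs = PullRequestModel().get_all(
    #         'some-group/some-repo', statuses=[PullRequest.STATUS_NEW],
    #         length=20, order_by='updated_on_raw', order_dir='desc')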
247 247
248 248 def count_awaiting_review(self, repo_name, source=False, statuses=None,
249 249 opened_by=None):
250 250 """
251 251 Count the number of pull requests for a specific repository that are
252 252 awaiting review.
253 253
254 254 :param repo_name: target or source repo
255 255 :param source: boolean flag to specify if repo_name refers to source
256 256 :param statuses: list of pull request statuses
257 257 :param opened_by: author user of the pull request
258 258 :returns: int number of pull requests
259 259 """
260 260 pull_requests = self.get_awaiting_review(
261 261 repo_name, source=source, statuses=statuses, opened_by=opened_by)
262 262
263 263 return len(pull_requests)
264 264
265 265 def get_awaiting_review(self, repo_name, source=False, statuses=None,
266 266 opened_by=None, offset=0, length=None,
267 267 order_by=None, order_dir='desc'):
268 268 """
269 269 Get all pull requests for a specific repository that are awaiting
270 270 review.
271 271
272 272 :param repo_name: target or source repo
273 273 :param source: boolean flag to specify if repo_name refers to source
274 274 :param statuses: list of pull request statuses
275 275 :param opened_by: author user of the pull request
276 276 :param offset: pagination offset
277 277 :param length: length of returned list
278 278 :param order_by: order of the returned list
279 279 :param order_dir: 'asc' or 'desc' ordering direction
280 280 :returns: list of pull requests
281 281 """
282 282 pull_requests = self.get_all(
283 283 repo_name, source=source, statuses=statuses, opened_by=opened_by,
284 284 order_by=order_by, order_dir=order_dir)
285 285
286 286 _filtered_pull_requests = []
287 287 for pr in pull_requests:
288 288 status = pr.calculated_review_status()
289 289 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
290 290 ChangesetStatus.STATUS_UNDER_REVIEW]:
291 291 _filtered_pull_requests.append(pr)
292 292 if length:
293 293 return _filtered_pull_requests[offset:offset+length]
294 294 else:
295 295 return _filtered_pull_requests
296 296
297 297 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
298 298 opened_by=None, user_id=None):
299 299 """
300 300 Count the number of pull requests for a specific repository that are
301 301 awaiting review from a specific user.
302 302
303 303 :param repo_name: target or source repo
304 304 :param source: boolean flag to specify if repo_name refers to source
305 305 :param statuses: list of pull request statuses
306 306 :param opened_by: author user of the pull request
307 307 :param user_id: reviewer user of the pull request
308 308 :returns: int number of pull requests
309 309 """
310 310 pull_requests = self.get_awaiting_my_review(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 user_id=user_id)
313 313
314 314 return len(pull_requests)
315 315
316 316 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
317 317 opened_by=None, user_id=None, offset=0,
318 318 length=None, order_by=None, order_dir='desc'):
319 319 """
320 320 Get all pull requests for a specific repository that are awaiting
321 321 review from a specific user.
322 322
323 323 :param repo_name: target or source repo
324 324 :param source: boolean flag to specify if repo_name refers to source
325 325 :param statuses: list of pull request statuses
326 326 :param opened_by: author user of the pull request
327 327 :param user_id: reviewer user of the pull request
328 328 :param offset: pagination offset
329 329 :param length: length of returned list
330 330 :param order_by: order of the returned list
331 331 :param order_dir: 'asc' or 'desc' ordering direction
332 332 :returns: list of pull requests
333 333 """
334 334 pull_requests = self.get_all(
335 335 repo_name, source=source, statuses=statuses, opened_by=opened_by,
336 336 order_by=order_by, order_dir=order_dir)
337 337
338 338 _my = PullRequestModel().get_not_reviewed(user_id)
339 339 my_participation = []
340 340 for pr in pull_requests:
341 341 if pr in _my:
342 342 my_participation.append(pr)
343 343 _filtered_pull_requests = my_participation
344 344 if length:
345 345 return _filtered_pull_requests[offset:offset+length]
346 346 else:
347 347 return _filtered_pull_requests
348 348
349 349 def get_not_reviewed(self, user_id):
350 350 return [
351 351 x.pull_request for x in PullRequestReviewers.query().filter(
352 352 PullRequestReviewers.user_id == user_id).all()
353 353 ]
354 354
355 355 def _prepare_participating_query(self, user_id=None, statuses=None,
356 356 order_by=None, order_dir='desc'):
357 357 q = PullRequest.query()
358 358 if user_id:
359 359 reviewers_subquery = Session().query(
360 360 PullRequestReviewers.pull_request_id).filter(
361 361 PullRequestReviewers.user_id == user_id).subquery()
362 362 user_filter = or_(
363 363 PullRequest.user_id == user_id,
364 364 PullRequest.pull_request_id.in_(reviewers_subquery)
365 365 )
366 366 q = PullRequest.query().filter(user_filter)
367 367
368 368 # closed,opened
369 369 if statuses:
370 370 q = q.filter(PullRequest.status.in_(statuses))
371 371
372 372 if order_by:
373 373 order_map = {
374 374 'name_raw': PullRequest.pull_request_id,
375 375 'title': PullRequest.title,
376 376 'updated_on_raw': PullRequest.updated_on,
377 377 'target_repo': PullRequest.target_repo_id
378 378 }
379 379 if order_dir == 'asc':
380 380 q = q.order_by(order_map[order_by].asc())
381 381 else:
382 382 q = q.order_by(order_map[order_by].desc())
383 383
384 384 return q
385 385
386 386 def count_im_participating_in(self, user_id=None, statuses=None):
387 387 q = self._prepare_participating_query(user_id, statuses=statuses)
388 388 return q.count()
389 389
390 390 def get_im_participating_in(
391 391 self, user_id=None, statuses=None, offset=0,
392 392 length=None, order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests that I'm participating in, or that I have opened
395 395 """
396 396
397 397 q = self._prepare_participating_query(
398 398 user_id, statuses=statuses, order_by=order_by,
399 399 order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def get_versions(self, pull_request):
409 409 """
410 410 returns versions of the pull request sorted by ID ascending
411 411 """
412 412 return PullRequestVersion.query()\
413 413 .filter(PullRequestVersion.pull_request == pull_request)\
414 414 .order_by(PullRequestVersion.pull_request_version_id.asc())\
415 415 .all()
416 416
417 417 def create(self, created_by, source_repo, source_ref, target_repo,
418 418 target_ref, revisions, reviewers, title, description=None):
419 419 created_by_user = self._get_user(created_by)
420 420 source_repo = self._get_repo(source_repo)
421 421 target_repo = self._get_repo(target_repo)
422 422
423 423 pull_request = PullRequest()
424 424 pull_request.source_repo = source_repo
425 425 pull_request.source_ref = source_ref
426 426 pull_request.target_repo = target_repo
427 427 pull_request.target_ref = target_ref
428 428 pull_request.revisions = revisions
429 429 pull_request.title = title
430 430 pull_request.description = description
431 431 pull_request.author = created_by_user
432 432
433 433 Session().add(pull_request)
434 434 Session().flush()
435 435
436 436 reviewer_ids = set()
437 437 # members / reviewers
438 438 for reviewer_object in reviewers:
439 439 if isinstance(reviewer_object, tuple):
440 440 user_id, reasons = reviewer_object
441 441 else:
442 442 user_id, reasons = reviewer_object, []
443 443
444 444 user = self._get_user(user_id)
445 445 reviewer_ids.add(user.user_id)
446 446
447 447 reviewer = PullRequestReviewers(user, pull_request, reasons)
448 448 Session().add(reviewer)
449 449
450 450 # Set approval status to "Under Review" for all commits which are
451 451 # part of this pull request.
452 452 ChangesetStatusModel().set_status(
453 453 repo=target_repo,
454 454 status=ChangesetStatus.STATUS_UNDER_REVIEW,
455 455 user=created_by_user,
456 456 pull_request=pull_request
457 457 )
458 458
459 459 self.notify_reviewers(pull_request, reviewer_ids)
460 460 self._trigger_pull_request_hook(
461 461 pull_request, created_by_user, 'create')
462 462
463 463 return pull_request
464 464
465 465 def _trigger_pull_request_hook(self, pull_request, user, action):
466 466 pull_request = self.__get_pull_request(pull_request)
467 467 target_scm = pull_request.target_repo.scm_instance()
468 468 if action == 'create':
469 469 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
470 470 elif action == 'merge':
471 471 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
472 472 elif action == 'close':
473 473 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
474 474 elif action == 'review_status_change':
475 475 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
476 476 elif action == 'update':
477 477 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
478 478 else:
479 479 return
480 480
481 481 trigger_hook(
482 482 username=user.username,
483 483 repo_name=pull_request.target_repo.repo_name,
484 484 repo_alias=target_scm.alias,
485 485 pull_request=pull_request)
486 486
487 487 def _get_commit_ids(self, pull_request):
488 488 """
489 489 Return the commit ids of the merged pull request.
490 490
491 491 This method does not yet deal correctly with the lack of autoupdates
492 492 or with implicit target updates.
493 493 For example: if a commit in the source repo is already in the target it
494 494 will still be reported.
495 495 """
496 496 merge_rev = pull_request.merge_rev
497 497 if merge_rev is None:
498 498 raise ValueError('This pull request was not merged yet')
499 499
500 500 commit_ids = list(pull_request.revisions)
501 501 if merge_rev not in commit_ids:
502 502 commit_ids.append(merge_rev)
503 503
504 504 return commit_ids
505 505
506 506 def merge(self, pull_request, user, extras):
507 507 log.debug("Merging pull request %s", pull_request.pull_request_id)
508 508 merge_state = self._merge_pull_request(pull_request, user, extras)
509 509 if merge_state.executed:
510 510 log.debug(
511 511 "Merge was successful, updating the pull request comments.")
512 512 self._comment_and_close_pr(pull_request, user, merge_state)
513 513 self._log_action('user_merged_pull_request', user, pull_request)
514 514 else:
515 515 log.warn("Merge failed, not updating the pull request.")
516 516 return merge_state
517 517
518 518 def _merge_pull_request(self, pull_request, user, extras):
519 519 target_vcs = pull_request.target_repo.scm_instance()
520 520 source_vcs = pull_request.source_repo.scm_instance()
521 521 target_ref = self._refresh_reference(
522 522 pull_request.target_ref_parts, target_vcs)
523 523
524 524 message = _(
525 525 'Merge pull request #%(pr_id)s from '
526 526 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
527 527 'pr_id': pull_request.pull_request_id,
528 528 'source_repo': source_vcs.name,
529 529 'source_ref_name': pull_request.source_ref_parts.name,
530 530 'pr_title': pull_request.title
531 531 }
532 532
533 533 workspace_id = self._workspace_id(pull_request)
534 534 use_rebase = self._use_rebase_for_merging(pull_request)
535 535
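        # run the merge under the callback daemon so hooks triggered by the
        # vcs operation can reach back into this process; the extras are
        # exposed to them via the RC_SCM_DATA config entry set below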
536 536 callback_daemon, extras = prepare_callback_daemon(
537 537 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
538 538 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
539 539
540 540 with callback_daemon:
541 541 # TODO: johbo: Implement a clean way to run a config_override
542 542 # for a single call.
543 543 target_vcs.config.set(
544 544 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
545 545 merge_state = target_vcs.merge(
546 546 target_ref, source_vcs, pull_request.source_ref_parts,
547 547 workspace_id, user_name=user.username,
548 548 user_email=user.email, message=message, use_rebase=use_rebase)
549 549 return merge_state
550 550
551 551 def _comment_and_close_pr(self, pull_request, user, merge_state):
552 552 pull_request.merge_rev = merge_state.merge_ref.commit_id
553 553 pull_request.updated_on = datetime.datetime.now()
554 554
555 555 CommentsModel().create(
556 556 text=unicode(_('Pull request merged and closed')),
557 557 repo=pull_request.target_repo.repo_id,
558 558 user=user.user_id,
559 559 pull_request=pull_request.pull_request_id,
560 560 f_path=None,
561 561 line_no=None,
562 562 closing_pr=True
563 563 )
564 564
565 565 Session().add(pull_request)
566 566 Session().flush()
567 567 # TODO: paris: replace invalidation with less radical solution
568 568 ScmModel().mark_for_invalidation(
569 569 pull_request.target_repo.repo_name)
570 570 self._trigger_pull_request_hook(pull_request, user, 'merge')
571 571
572 572 def has_valid_update_type(self, pull_request):
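        # only symbolic refs (bookmarks, branches, tags) can move to new
        # commits, so only those ref types support an update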
573 573 source_ref_type = pull_request.source_ref_parts.type
574 574 return source_ref_type in ['book', 'branch', 'tag']
575 575
576 576 def update_commits(self, pull_request):
577 577 """
578 578 Get the updated list of commits for the pull request
579 579 and return the new pull request version and the list
580 580 of commits processed by this update action
581 581 """
582 582 pull_request = self.__get_pull_request(pull_request)
583 583 source_ref_type = pull_request.source_ref_parts.type
584 584 source_ref_name = pull_request.source_ref_parts.name
585 585 source_ref_id = pull_request.source_ref_parts.commit_id
586 586
587 587 target_ref_type = pull_request.target_ref_parts.type
588 588 target_ref_name = pull_request.target_ref_parts.name
589 589 target_ref_id = pull_request.target_ref_parts.commit_id
590 590
591 591 if not self.has_valid_update_type(pull_request):
592 592 log.debug(
593 593 "Skipping update of pull request %s due to ref type: %s",
594 594 pull_request, source_ref_type)
595 595 return UpdateResponse(
596 596 executed=False,
597 reason=UpdateFailureReason.WRONG_REF_TPYE,
597 reason=UpdateFailureReason.WRONG_REF_TYPE,
598 598 old=pull_request, new=None, changes=None,
599 599 source_changed=False, target_changed=False)
600 600
601 601 # source repo
602 602 source_repo = pull_request.source_repo.scm_instance()
603 603 try:
604 604 source_commit = source_repo.get_commit(commit_id=source_ref_name)
605 605 except CommitDoesNotExistError:
606 606 return UpdateResponse(
607 607 executed=False,
608 608 reason=UpdateFailureReason.MISSING_SOURCE_REF,
609 609 old=pull_request, new=None, changes=None,
610 610 source_changed=False, target_changed=False)
611 611
612 612 source_changed = source_ref_id != source_commit.raw_id
613 613
614 614 # target repo
615 615 target_repo = pull_request.target_repo.scm_instance()
616 616 try:
617 617 target_commit = target_repo.get_commit(commit_id=target_ref_name)
618 618 except CommitDoesNotExistError:
619 619 return UpdateResponse(
620 620 executed=False,
621 621 reason=UpdateFailureReason.MISSING_TARGET_REF,
622 622 old=pull_request, new=None, changes=None,
623 623 source_changed=False, target_changed=False)
624 624 target_changed = target_ref_id != target_commit.raw_id
625 625
626 626 if not (source_changed or target_changed):
627 627 log.debug("Nothing changed in pull request %s", pull_request)
628 628 return UpdateResponse(
629 629 executed=False,
630 630 reason=UpdateFailureReason.NO_CHANGE,
631 631 old=pull_request, new=None, changes=None,
632 632 source_changed=source_changed, target_changed=target_changed)
633 633
634 634 change_in_found = 'target repo' if target_changed else 'source repo'
635 635 log.debug('Updating pull request because of change in %s detected',
636 636 change_in_found)
637 637
638 638 # Finally there is a need for an update, in case of source change
639 639 # we create a new version, else just an update
640 640 if source_changed:
641 641 pull_request_version = self._create_version_from_snapshot(pull_request)
642 642 self._link_comments_to_version(pull_request_version)
643 643 else:
644 644 try:
645 645 ver = pull_request.versions[-1]
646 646 except IndexError:
647 647 ver = None
648 648
649 649 pull_request.pull_request_version_id = \
650 650 ver.pull_request_version_id if ver else None
651 651 pull_request_version = pull_request
652 652
653 653 try:
654 654 if target_ref_type in ('tag', 'branch', 'book'):
655 655 target_commit = target_repo.get_commit(target_ref_name)
656 656 else:
657 657 target_commit = target_repo.get_commit(target_ref_id)
658 658 except CommitDoesNotExistError:
659 659 return UpdateResponse(
660 660 executed=False,
661 661 reason=UpdateFailureReason.MISSING_TARGET_REF,
662 662 old=pull_request, new=None, changes=None,
663 663 source_changed=source_changed, target_changed=target_changed)
664 664
665 665 # re-compute commit ids
666 666 old_commit_ids = pull_request.revisions
667 667 pre_load = ["author", "branch", "date", "message"]
668 668 commit_ranges = target_repo.compare(
669 669 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
670 670 pre_load=pre_load)
671 671
672 672 ancestor = target_repo.get_common_ancestor(
673 673 target_commit.raw_id, source_commit.raw_id, source_repo)
674 674
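        # store the refreshed refs: the source ref now points at the new
        # source tip, while the target ref is pinned to the common ancestor
        # so the diff base stays stable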
675 675 pull_request.source_ref = '%s:%s:%s' % (
676 676 source_ref_type, source_ref_name, source_commit.raw_id)
677 677 pull_request.target_ref = '%s:%s:%s' % (
678 678 target_ref_type, target_ref_name, ancestor)
679 679
680 680 pull_request.revisions = [
681 681 commit.raw_id for commit in reversed(commit_ranges)]
682 682 pull_request.updated_on = datetime.datetime.now()
683 683 Session().add(pull_request)
684 684 new_commit_ids = pull_request.revisions
685 685
686 686 old_diff_data, new_diff_data = self._generate_update_diffs(
687 687 pull_request, pull_request_version)
688 688
689 689 # calculate commit and file changes
690 690 changes = self._calculate_commit_id_changes(
691 691 old_commit_ids, new_commit_ids)
692 692 file_changes = self._calculate_file_changes(
693 693 old_diff_data, new_diff_data)
694 694
695 695 # set comments as outdated if DIFFS changed
696 696 CommentsModel().outdate_comments(
697 697 pull_request, old_diff_data=old_diff_data,
698 698 new_diff_data=new_diff_data)
699 699
700 700 commit_changes = (changes.added or changes.removed)
701 701 file_node_changes = (
702 702 file_changes.added or file_changes.modified or file_changes.removed)
703 703 pr_has_changes = commit_changes or file_node_changes
704 704
705 705 # Add an automatic comment to the pull request, in case
706 706 # anything has changed
707 707 if pr_has_changes:
708 708 update_comment = CommentsModel().create(
709 709 text=self._render_update_message(changes, file_changes),
710 710 repo=pull_request.target_repo,
711 711 user=pull_request.author,
712 712 pull_request=pull_request,
713 713 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
714 714
715 715 # Update status to "Under Review" for added commits
716 716 for commit_id in changes.added:
717 717 ChangesetStatusModel().set_status(
718 718 repo=pull_request.source_repo,
719 719 status=ChangesetStatus.STATUS_UNDER_REVIEW,
720 720 comment=update_comment,
721 721 user=pull_request.author,
722 722 pull_request=pull_request,
723 723 revision=commit_id)
724 724
725 725 log.debug(
726 726 'Updated pull request %s, added_ids: %s, common_ids: %s, '
727 727 'removed_ids: %s', pull_request.pull_request_id,
728 728 changes.added, changes.common, changes.removed)
729 729 log.debug(
730 730 'Updated pull request with the following file changes: %s',
731 731 file_changes)
732 732
733 733 log.info(
734 734 "Updated pull request %s from commit %s to commit %s, "
735 735 "stored new version %s of this pull request.",
736 736 pull_request.pull_request_id, source_ref_id,
737 737 pull_request.source_ref_parts.commit_id,
738 738 pull_request_version.pull_request_version_id)
739 739 Session().commit()
740 740 self._trigger_pull_request_hook(
741 741 pull_request, pull_request.author, 'update')
742 742
743 743 return UpdateResponse(
744 744 executed=True, reason=UpdateFailureReason.NONE,
745 745 old=pull_request, new=pull_request_version, changes=changes,
746 746 source_changed=source_changed, target_changed=target_changed)
747 747
748 748 def _create_version_from_snapshot(self, pull_request):
749 749 version = PullRequestVersion()
750 750 version.title = pull_request.title
751 751 version.description = pull_request.description
752 752 version.status = pull_request.status
753 753 version.created_on = datetime.datetime.now()
754 754 version.updated_on = pull_request.updated_on
755 755 version.user_id = pull_request.user_id
756 756 version.source_repo = pull_request.source_repo
757 757 version.source_ref = pull_request.source_ref
758 758 version.target_repo = pull_request.target_repo
759 759 version.target_ref = pull_request.target_ref
760 760
761 761 version._last_merge_source_rev = pull_request._last_merge_source_rev
762 762 version._last_merge_target_rev = pull_request._last_merge_target_rev
763 763 version._last_merge_status = pull_request._last_merge_status
764 764 version.shadow_merge_ref = pull_request.shadow_merge_ref
765 765 version.merge_rev = pull_request.merge_rev
766 766
767 767 version.revisions = pull_request.revisions
768 768 version.pull_request = pull_request
769 769 Session().add(version)
770 770 Session().flush()
771 771
772 772 return version
773 773
774 774 def _generate_update_diffs(self, pull_request, pull_request_version):
775 775
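        # the old diff is built from the saved version's refs, the new diff
        # from the live pull request, so the two can be compared file by file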
776 776 diff_context = (
777 777 self.DIFF_CONTEXT +
778 778 CommentsModel.needed_extra_diff_context())
779 779
780 780 source_repo = pull_request_version.source_repo
781 781 source_ref_id = pull_request_version.source_ref_parts.commit_id
782 782 target_ref_id = pull_request_version.target_ref_parts.commit_id
783 783 old_diff = self._get_diff_from_pr_or_version(
784 784 source_repo, source_ref_id, target_ref_id, context=diff_context)
785 785
786 786 source_repo = pull_request.source_repo
787 787 source_ref_id = pull_request.source_ref_parts.commit_id
788 788 target_ref_id = pull_request.target_ref_parts.commit_id
789 789
790 790 new_diff = self._get_diff_from_pr_or_version(
791 791 source_repo, source_ref_id, target_ref_id, context=diff_context)
792 792
793 793 old_diff_data = diffs.DiffProcessor(old_diff)
794 794 old_diff_data.prepare()
795 795 new_diff_data = diffs.DiffProcessor(new_diff)
796 796 new_diff_data.prepare()
797 797
798 798 return old_diff_data, new_diff_data
799 799
800 800 def _link_comments_to_version(self, pull_request_version):
801 801 """
802 802 Link all unlinked comments of this pull request to the given version.
803 803
804 804 :param pull_request_version: The `PullRequestVersion` to which
805 805 the comments shall be linked.
806 806
807 807 """
808 808 pull_request = pull_request_version.pull_request
809 809 comments = ChangesetComment.query().filter(
810 810 # TODO: johbo: Should we query for the repo at all here?
811 811 # Pending decision on how comments of PRs are to be related
812 812 # to either the source repo, the target repo or no repo at all.
813 813 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
814 814 ChangesetComment.pull_request == pull_request,
815 815 ChangesetComment.pull_request_version == None)
816 816
817 817 # TODO: johbo: Find out why this breaks if it is done in a bulk
818 818 # operation.
819 819 for comment in comments:
820 820 comment.pull_request_version_id = (
821 821 pull_request_version.pull_request_version_id)
822 822 Session().add(comment)
823 823
824 824 def _calculate_commit_id_changes(self, old_ids, new_ids):
825 825 added = [x for x in new_ids if x not in old_ids]
826 826 common = [x for x in new_ids if x in old_ids]
827 827 removed = [x for x in old_ids if x not in new_ids]
828 828 total = new_ids
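        # e.g. old_ids=['a', 'b'], new_ids=['b', 'c'] gives
        # added=['c'], common=['b'], removed=['a'], total=['b', 'c']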
829 829 return ChangeTuple(added, common, removed, total)
830 830
831 831 def _calculate_file_changes(self, old_diff_data, new_diff_data):
832 832
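        # compare per-file diffs by hashing the raw diff text of each file in
        # the old and new diff; identical hashes mean the file did not change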
833 833 old_files = OrderedDict()
834 834 for diff_data in old_diff_data.parsed_diff:
835 835 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
836 836
837 837 added_files = []
838 838 modified_files = []
839 839 removed_files = []
840 840 for diff_data in new_diff_data.parsed_diff:
841 841 new_filename = diff_data['filename']
842 842 new_hash = md5_safe(diff_data['raw_diff'])
843 843
844 844 old_hash = old_files.get(new_filename)
845 845 if not old_hash:
846 846 # file is not present in old diff, means it's added
847 847 added_files.append(new_filename)
848 848 else:
849 849 if new_hash != old_hash:
850 850 modified_files.append(new_filename)
851 851 # now remove a file from old, since we have seen it already
852 852 del old_files[new_filename]
853 853
854 854 # removed files are those present in the old diff but not in the NEW one;
855 855 # since we delete old entries that also appear in the new diff, any
856 856 # left-overs are the removed files
857 857 removed_files.extend(old_files.keys())
858 858
859 859 return FileChangeTuple(added_files, modified_files, removed_files)
860 860
861 861 def _render_update_message(self, changes, file_changes):
862 862 """
863 863 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
864 864 so it always looks the same regardless of which default
865 865 renderer the system is using.
866 866
867 867 :param changes: changes named tuple
868 868 :param file_changes: file changes named tuple
869 869
870 870 """
871 871 new_status = ChangesetStatus.get_status_lbl(
872 872 ChangesetStatus.STATUS_UNDER_REVIEW)
873 873
874 874 changed_files = (
875 875 file_changes.added + file_changes.modified + file_changes.removed)
876 876
877 877 params = {
878 878 'under_review_label': new_status,
879 879 'added_commits': changes.added,
880 880 'removed_commits': changes.removed,
881 881 'changed_files': changed_files,
882 882 'added_files': file_changes.added,
883 883 'modified_files': file_changes.modified,
884 884 'removed_files': file_changes.removed,
885 885 }
886 886 renderer = RstTemplateRenderer()
887 887 return renderer.render('pull_request_update.mako', **params)
888 888
889 889 def edit(self, pull_request, title, description):
890 890 pull_request = self.__get_pull_request(pull_request)
891 891 if pull_request.is_closed():
892 892 raise ValueError('This pull request is closed')
893 893 if title:
894 894 pull_request.title = title
895 895 pull_request.description = description
896 896 pull_request.updated_on = datetime.datetime.now()
897 897 Session().add(pull_request)
898 898
899 899 def update_reviewers(self, pull_request, reviewer_data):
900 900 """
901 901 Update the reviewers in the pull request
902 902
903 903 :param pull_request: the pr to update
904 904 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
905 905 """
906 906
907 907 reviewers_reasons = {}
908 908 for user_id, reasons in reviewer_data:
909 909 if isinstance(user_id, (int, basestring)):
910 910 user_id = self._get_user(user_id).user_id
911 911 reviewers_reasons[user_id] = reasons
912 912
913 913 reviewers_ids = set(reviewers_reasons.keys())
914 914 pull_request = self.__get_pull_request(pull_request)
915 915 current_reviewers = PullRequestReviewers.query()\
916 916 .filter(PullRequestReviewers.pull_request ==
917 917 pull_request).all()
918 918 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
919 919
920 920 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
921 921 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
922 922
923 923 log.debug("Adding %s reviewers", ids_to_add)
924 924 log.debug("Removing %s reviewers", ids_to_remove)
925 925 changed = False
926 926 for uid in ids_to_add:
927 927 changed = True
928 928 _usr = self._get_user(uid)
929 929 reasons = reviewers_reasons[uid]
930 930 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
931 931 Session().add(reviewer)
932 932
933 933 for uid in ids_to_remove:
934 934 changed = True
935 935 reviewers = PullRequestReviewers.query()\
936 936 .filter(PullRequestReviewers.user_id == uid,
937 937 PullRequestReviewers.pull_request == pull_request)\
938 938 .all()
939 939 # use .all() in case we accidentally added the same person twice
940 940 # this CAN happen due to the lack of DB checks
941 941 for obj in reviewers:
942 942 Session().delete(obj)
943 943
944 944 if changed:
945 945 pull_request.updated_on = datetime.datetime.now()
946 946 Session().add(pull_request)
947 947
948 948 self.notify_reviewers(pull_request, ids_to_add)
949 949 return ids_to_add, ids_to_remove
950 950
951 951 def get_url(self, pull_request):
952 952 return h.url('pullrequest_show',
953 953 repo_name=safe_str(pull_request.target_repo.repo_name),
954 954 pull_request_id=pull_request.pull_request_id,
955 955 qualified=True)
956 956
957 957 def get_shadow_clone_url(self, pull_request):
958 958 """
959 959 Returns qualified url pointing to the shadow repository. If this pull
960 960 request is closed there is no shadow repository and ``None`` will be
961 961 returned.
962 962 """
963 963 if pull_request.is_closed():
964 964 return None
965 965 else:
966 966 pr_url = urllib.unquote(self.get_url(pull_request))
967 967 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
968 968
969 969 def notify_reviewers(self, pull_request, reviewers_ids):
970 970 # notification to reviewers
971 971 if not reviewers_ids:
972 972 return
973 973
974 974 pull_request_obj = pull_request
975 975 # get the current participants of this pull request
976 976 recipients = reviewers_ids
977 977 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
978 978
979 979 pr_source_repo = pull_request_obj.source_repo
980 980 pr_target_repo = pull_request_obj.target_repo
981 981
982 982 pr_url = h.url(
983 983 'pullrequest_show',
984 984 repo_name=pr_target_repo.repo_name,
985 985 pull_request_id=pull_request_obj.pull_request_id,
986 986 qualified=True,)
987 987
988 988 # set some variables for email notification
989 989 pr_target_repo_url = h.url(
990 990 'summary_home',
991 991 repo_name=pr_target_repo.repo_name,
992 992 qualified=True)
993 993
994 994 pr_source_repo_url = h.url(
995 995 'summary_home',
996 996 repo_name=pr_source_repo.repo_name,
997 997 qualified=True)
998 998
999 999 # pull request specifics
1000 1000 pull_request_commits = [
1001 1001 (x.raw_id, x.message)
1002 1002 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1003 1003
1004 1004 kwargs = {
1005 1005 'user': pull_request.author,
1006 1006 'pull_request': pull_request_obj,
1007 1007 'pull_request_commits': pull_request_commits,
1008 1008
1009 1009 'pull_request_target_repo': pr_target_repo,
1010 1010 'pull_request_target_repo_url': pr_target_repo_url,
1011 1011
1012 1012 'pull_request_source_repo': pr_source_repo,
1013 1013 'pull_request_source_repo_url': pr_source_repo_url,
1014 1014
1015 1015 'pull_request_url': pr_url,
1016 1016 }
1017 1017
1018 1018 # pre-generate the subject for notification itself
1019 1019 (subject,
1020 1020 _h, _e, # we don't care about those
1021 1021 body_plaintext) = EmailNotificationModel().render_email(
1022 1022 notification_type, **kwargs)
1023 1023
1024 1024 # create notification objects, and emails
1025 1025 NotificationModel().create(
1026 1026 created_by=pull_request.author,
1027 1027 notification_subject=subject,
1028 1028 notification_body=body_plaintext,
1029 1029 notification_type=notification_type,
1030 1030 recipients=recipients,
1031 1031 email_kwargs=kwargs,
1032 1032 )
1033 1033
1034 1034 def delete(self, pull_request):
1035 1035 pull_request = self.__get_pull_request(pull_request)
1036 1036 self._cleanup_merge_workspace(pull_request)
1037 1037 Session().delete(pull_request)
1038 1038
1039 1039 def close_pull_request(self, pull_request, user):
1040 1040 pull_request = self.__get_pull_request(pull_request)
1041 1041 self._cleanup_merge_workspace(pull_request)
1042 1042 pull_request.status = PullRequest.STATUS_CLOSED
1043 1043 pull_request.updated_on = datetime.datetime.now()
1044 1044 Session().add(pull_request)
1045 1045 self._trigger_pull_request_hook(
1046 1046 pull_request, pull_request.author, 'close')
1047 1047 self._log_action('user_closed_pull_request', user, pull_request)
1048 1048
1049 1049 def close_pull_request_with_comment(self, pull_request, user, repo,
1050 1050 message=None):
1051 1051 status = ChangesetStatus.STATUS_REJECTED
1052 1052
1053 1053 if not message:
1054 1054 message = (
1055 1055 _('Status change %(transition_icon)s %(status)s') % {
1056 1056 'transition_icon': '>',
1057 1057 'status': ChangesetStatus.get_status_lbl(status)})
1058 1058
1059 1059 internal_message = _('Closing with') + ' ' + message
1060 1060
1061 1061 comm = CommentsModel().create(
1062 1062 text=internal_message,
1063 1063 repo=repo.repo_id,
1064 1064 user=user.user_id,
1065 1065 pull_request=pull_request.pull_request_id,
1066 1066 f_path=None,
1067 1067 line_no=None,
1068 1068 status_change=ChangesetStatus.get_status_lbl(status),
1069 1069 status_change_type=status,
1070 1070 closing_pr=True
1071 1071 )
1072 1072
1073 1073 ChangesetStatusModel().set_status(
1074 1074 repo.repo_id,
1075 1075 status,
1076 1076 user.user_id,
1077 1077 comm,
1078 1078 pull_request=pull_request.pull_request_id
1079 1079 )
1080 1080 Session().flush()
1081 1081
1082 1082 PullRequestModel().close_pull_request(
1083 1083 pull_request.pull_request_id, user)
1084 1084
1085 1085 def merge_status(self, pull_request):
1086 1086 if not self._is_merge_enabled(pull_request):
1087 1087 return False, _('Server-side pull request merging is disabled.')
1088 1088 if pull_request.is_closed():
1089 1089 return False, _('This pull request is closed.')
1090 1090 merge_possible, msg = self._check_repo_requirements(
1091 1091 target=pull_request.target_repo, source=pull_request.source_repo)
1092 1092 if not merge_possible:
1093 1093 return merge_possible, msg
1094 1094
1095 1095 try:
1096 1096 resp = self._try_merge(pull_request)
1097 1097 log.debug("Merge response: %s", resp)
1098 1098 status = resp.possible, self.merge_status_message(
1099 1099 resp.failure_reason)
1100 1100 except NotImplementedError:
1101 1101 status = False, _('Pull request merging is not supported.')
1102 1102
1103 1103 return status
1104 1104
1105 1105 def _check_repo_requirements(self, target, source):
1106 1106 """
1107 1107 Check if `target` and `source` have compatible requirements.
1108 1108
1109 1109 Currently this is just checking for largefiles.
1110 1110 """
1111 1111 target_has_largefiles = self._has_largefiles(target)
1112 1112 source_has_largefiles = self._has_largefiles(source)
1113 1113 merge_possible = True
1114 1114 message = u''
1115 1115
1116 1116 if target_has_largefiles != source_has_largefiles:
1117 1117 merge_possible = False
1118 1118 if source_has_largefiles:
1119 1119 message = _(
1120 1120 'Target repository large files support is disabled.')
1121 1121 else:
1122 1122 message = _(
1123 1123 'Source repository large files support is disabled.')
1124 1124
1125 1125 return merge_possible, message
1126 1126
1127 1127 def _has_largefiles(self, repo):
1128 1128 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1129 1129 'extensions', 'largefiles')
1130 1130 return largefiles_ui and largefiles_ui[0].active
1131 1131
1132 1132 def _try_merge(self, pull_request):
1133 1133 """
1134 1134 Try to merge the pull request and return the merge status.
1135 1135 """
1136 1136 log.debug(
1137 1137 "Trying out if the pull request %s can be merged.",
1138 1138 pull_request.pull_request_id)
1139 1139 target_vcs = pull_request.target_repo.scm_instance()
1140 1140
1141 1141 # Refresh the target reference.
1142 1142 try:
1143 1143 target_ref = self._refresh_reference(
1144 1144 pull_request.target_ref_parts, target_vcs)
1145 1145 except CommitDoesNotExistError:
1146 1146 merge_state = MergeResponse(
1147 1147 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1148 1148 return merge_state
1149 1149
1150 1150 target_locked = pull_request.target_repo.locked
1151 1151 if target_locked and target_locked[0]:
1152 1152 log.debug("The target repository is locked.")
1153 1153 merge_state = MergeResponse(
1154 1154 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1155 1155 elif self._needs_merge_state_refresh(pull_request, target_ref):
1156 1156 log.debug("Refreshing the merge status of the repository.")
1157 1157 merge_state = self._refresh_merge_state(
1158 1158 pull_request, target_vcs, target_ref)
1159 1159 else:
1160 1160 possible = pull_request.\
1161 1161 _last_merge_status == MergeFailureReason.NONE
1162 1162 merge_state = MergeResponse(
1163 1163 possible, False, None, pull_request._last_merge_status)
1164 1164
1165 1165 return merge_state
1166 1166
1167 1167 def _refresh_reference(self, reference, vcs_repository):
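        # branches and bookmarks are resolved by name so the latest commit
        # they point to is picked up; other ref types stay pinned to their
        # recorded commit id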
1168 1168 if reference.type in ('branch', 'book'):
1169 1169 name_or_id = reference.name
1170 1170 else:
1171 1171 name_or_id = reference.commit_id
1172 1172 refreshed_commit = vcs_repository.get_commit(name_or_id)
1173 1173 refreshed_reference = Reference(
1174 1174 reference.type, reference.name, refreshed_commit.raw_id)
1175 1175 return refreshed_reference
1176 1176
1177 1177 def _needs_merge_state_refresh(self, pull_request, target_reference):
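        # the cached merge state is stale once either the source tip or the
        # target ref moved away from the revisions recorded at the last
        # dry-run merge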
1178 1178 return not (
1179 1179 pull_request.revisions and
1180 1180 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1181 1181 target_reference.commit_id == pull_request._last_merge_target_rev)
1182 1182
1183 1183 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1184 1184 workspace_id = self._workspace_id(pull_request)
1185 1185 source_vcs = pull_request.source_repo.scm_instance()
1186 1186 use_rebase = self._use_rebase_for_merging(pull_request)
1187 1187 merge_state = target_vcs.merge(
1188 1188 target_reference, source_vcs, pull_request.source_ref_parts,
1189 1189 workspace_id, dry_run=True, use_rebase=use_rebase)
1190 1190
1191 1191 # Do not store the response if there was an unknown error.
1192 1192 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1193 1193 pull_request._last_merge_source_rev = \
1194 1194 pull_request.source_ref_parts.commit_id
1195 1195 pull_request._last_merge_target_rev = target_reference.commit_id
1196 1196 pull_request._last_merge_status = merge_state.failure_reason
1197 1197 pull_request.shadow_merge_ref = merge_state.merge_ref
1198 1198 Session().add(pull_request)
1199 1199 Session().commit()
1200 1200
1201 1201 return merge_state
1202 1202
1203 1203 def _workspace_id(self, pull_request):
1204 1204 workspace_id = 'pr-%s' % pull_request.pull_request_id
1205 1205 return workspace_id
1206 1206
1207 1207 def merge_status_message(self, status_code):
1208 1208 """
1209 1209 Return a human friendly error message for the given merge status code.
1210 1210 """
1211 1211 return self.MERGE_STATUS_MESSAGES[status_code]
1212 1212
1213 1213 def generate_repo_data(self, repo, commit_id=None, branch=None,
1214 1214 bookmark=None):
1215 1215 all_refs, selected_ref = \
1216 1216 self._get_repo_pullrequest_sources(
1217 1217 repo.scm_instance(), commit_id=commit_id,
1218 1218 branch=branch, bookmark=bookmark)
1219 1219
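        # convert the grouped refs into the nested {'text': ..., 'children':
        # [...]} structure expected by the select2 widgets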
1220 1220 refs_select2 = []
1221 1221 for element in all_refs:
1222 1222 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1223 1223 refs_select2.append({'text': element[1], 'children': children})
1224 1224
1225 1225 return {
1226 1226 'user': {
1227 1227 'user_id': repo.user.user_id,
1228 1228 'username': repo.user.username,
1229 1229 'firstname': repo.user.firstname,
1230 1230 'lastname': repo.user.lastname,
1231 1231 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1232 1232 },
1233 1233 'description': h.chop_at_smart(repo.description, '\n'),
1234 1234 'refs': {
1235 1235 'all_refs': all_refs,
1236 1236 'selected_ref': selected_ref,
1237 1237 'select2_refs': refs_select2
1238 1238 }
1239 1239 }
1240 1240
1241 1241 def generate_pullrequest_title(self, source, source_ref, target):
1242 1242 return u'{source}#{at_ref} to {target}'.format(
1243 1243 source=source,
1244 1244 at_ref=source_ref,
1245 1245 target=target,
1246 1246 )
1247 1247
1248 1248 def _cleanup_merge_workspace(self, pull_request):
1249 1249 # Merging related cleanup
1250 1250 target_scm = pull_request.target_repo.scm_instance()
1251 1251 workspace_id = 'pr-%s' % pull_request.pull_request_id
1252 1252
1253 1253 try:
1254 1254 target_scm.cleanup_merge_workspace(workspace_id)
1255 1255 except NotImplementedError:
1256 1256 pass
1257 1257
1258 1258 def _get_repo_pullrequest_sources(
1259 1259 self, repo, commit_id=None, branch=None, bookmark=None):
1260 1260 """
1261 1261 Return a structure with repo's interesting commits, suitable for
1262 1262 the selectors in pullrequest controller
1263 1263
1264 1264 :param commit_id: a commit that must be in the list somehow
1265 1265 and selected by default
1266 1266 :param branch: a branch that must be in the list and selected
1267 1267 by default - even if closed
1268 1268 :param bookmark: a bookmark that must be in the list and selected
1269 1269 """
1270 1270
1271 1271 commit_id = safe_str(commit_id) if commit_id else None
1272 1272 branch = safe_str(branch) if branch else None
1273 1273 bookmark = safe_str(bookmark) if bookmark else None
1274 1274
1275 1275 selected = None
1276 1276
1277 1277 # order matters: first source that has commit_id in it will be selected
1278 1278 sources = []
1279 1279 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1280 1280 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1281 1281
1282 1282 if commit_id:
1283 1283 ref_commit = (h.short_id(commit_id), commit_id)
1284 1284 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1285 1285
1286 1286 sources.append(
1287 1287 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1288 1288 )
1289 1289
1290 1290 groups = []
1291 1291 for group_key, ref_list, group_name, match in sources:
1292 1292 group_refs = []
1293 1293 for ref_name, ref_id in ref_list:
1294 1294 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1295 1295 group_refs.append((ref_key, ref_name))
1296 1296
1297 1297 if not selected:
1298 1298 if set([commit_id, match]) & set([ref_id, ref_name]):
1299 1299 selected = ref_key
1300 1300
1301 1301 if group_refs:
1302 1302 groups.append((group_refs, group_name))
1303 1303
1304 1304 if not selected:
1305 1305 ref = commit_id or branch or bookmark
1306 1306 if ref:
1307 1307 raise CommitDoesNotExistError(
1308 1308 'No commit refs could be found matching: %s' % ref)
1309 1309 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1310 1310 selected = 'branch:%s:%s' % (
1311 1311 repo.DEFAULT_BRANCH_NAME,
1312 1312 repo.branches[repo.DEFAULT_BRANCH_NAME]
1313 1313 )
1314 1314 elif repo.commit_ids:
1315 1315 rev = repo.commit_ids[0]
1316 1316 selected = 'rev:%s:%s' % (rev, rev)
1317 1317 else:
1318 1318 raise EmptyRepositoryError()
1319 1319 return groups, selected
1320 1320
1321 1321 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1322 1322 return self._get_diff_from_pr_or_version(
1323 1323 source_repo, source_ref_id, target_ref_id, context=context)
1324 1324
1325 1325 def _get_diff_from_pr_or_version(
1326 1326 self, source_repo, source_ref_id, target_ref_id, context):
1327 1327 target_commit = source_repo.get_commit(
1328 1328 commit_id=safe_str(target_ref_id))
1329 1329 source_commit = source_repo.get_commit(
1330 1330 commit_id=safe_str(source_ref_id))
1331 1331 if isinstance(source_repo, Repository):
1332 1332 vcs_repo = source_repo.scm_instance()
1333 1333 else:
1334 1334 vcs_repo = source_repo
1335 1335
1336 1336 # TODO: johbo: In the context of an update, we cannot reach
1337 1337 # the old commit anymore with our normal mechanisms. It needs
1338 1338 # some sort of special support in the vcs layer to avoid this
1339 1339 # workaround.
1340 1340 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1341 1341 vcs_repo.alias == 'git'):
1342 1342 source_commit.raw_id = safe_str(source_ref_id)
1343 1343
1344 1344 log.debug('calculating diff between '
1345 1345 'source_ref:%s and target_ref:%s for repo `%s`',
1346 1346 target_ref_id, source_ref_id,
1347 1347 safe_unicode(vcs_repo.path))
1348 1348
1349 1349 vcs_diff = vcs_repo.get_diff(
1350 1350 commit1=target_commit, commit2=source_commit, context=context)
1351 1351 return vcs_diff
1352 1352
1353 1353 def _is_merge_enabled(self, pull_request):
1354 1354 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1355 1355 settings = settings_model.get_general_settings()
1356 1356 return settings.get('rhodecode_pr_merge_enabled', False)
1357 1357
1358 1358 def _use_rebase_for_merging(self, pull_request):
1359 1359 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1360 1360 settings = settings_model.get_general_settings()
1361 1361 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1362 1362
1363 1363 def _log_action(self, action, user, pull_request):
1364 1364 action_logger(
1365 1365 user,
1366 1366 '{action}:{pr_id}'.format(
1367 1367 action=action, pr_id=pull_request.pull_request_id),
1368 1368 pull_request.target_repo)
1369 1369
1370 1370
1371 1371 class MergeCheck(object):
1372 1372 """
1373 1373 Perform merge checks and return a check object which stores information
1374 1374 about merge errors and merge conditions
1375 1375 """
1376 1376 TODO_CHECK = 'todo'
1377 1377 PERM_CHECK = 'perm'
1378 1378 REVIEW_CHECK = 'review'
1379 1379 MERGE_CHECK = 'merge'
1380 1380
1381 1381 def __init__(self):
1382 1382 self.review_status = None
1383 1383 self.merge_possible = None
1384 1384 self.merge_msg = ''
1385 1385 self.failed = None
1386 1386 self.errors = []
1387 1387 self.error_details = OrderedDict()
1388 1388
1389 1389 def push_error(self, error_type, message, error_key, details):
1390 1390 self.failed = True
1391 1391 self.errors.append([error_type, message])
1392 1392 self.error_details[error_key] = dict(
1393 1393 details=details,
1394 1394 error_type=error_type,
1395 1395 message=message
1396 1396 )
1397 1397
1398 1398 @classmethod
1399 1399 def validate(cls, pull_request, user, fail_early=False, translator=None):
1400 1400 # if migrated to pyramid...
1401 1401 # _ = lambda: translator or _ # use passed in translator if any
1402 1402
1403 1403 merge_check = cls()
1404 1404
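        # checks are accumulated on the merge_check object; with fail_early
        # the first failed check is returned immediately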
1405 1405 # permissions to merge
1406 1406 user_allowed_to_merge = PullRequestModel().check_user_merge(
1407 1407 pull_request, user)
1408 1408 if not user_allowed_to_merge:
1409 1409 log.debug("MergeCheck: cannot merge, approval is pending.")
1410 1410
1411 1411 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1412 1412 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1413 1413 if fail_early:
1414 1414 return merge_check
1415 1415
1416 1416 # review status, must be always present
1417 1417 review_status = pull_request.calculated_review_status()
1418 1418 merge_check.review_status = review_status
1419 1419
1420 1420 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1421 1421 if not status_approved:
1422 1422 log.debug("MergeCheck: cannot merge, approval is pending.")
1423 1423
1424 1424 msg = _('Pull request reviewer approval is pending.')
1425 1425
1426 1426 merge_check.push_error(
1427 1427 'warning', msg, cls.REVIEW_CHECK, review_status)
1428 1428
1429 1429 if fail_early:
1430 1430 return merge_check
1431 1431
1432 1432 # left over TODOs
1433 1433 todos = CommentsModel().get_unresolved_todos(pull_request)
1434 1434 if todos:
1435 1435 log.debug("MergeCheck: cannot merge, {} "
1436 1436 "unresolved todos left.".format(len(todos)))
1437 1437
1438 1438 if len(todos) == 1:
1439 1439 msg = _('Cannot merge, {} TODO still not resolved.').format(
1440 1440 len(todos))
1441 1441 else:
1442 1442 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1443 1443 len(todos))
1444 1444
1445 1445 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1446 1446
1447 1447 if fail_early:
1448 1448 return merge_check
1449 1449
1450 1450 # merge possible
1451 1451 merge_status, msg = PullRequestModel().merge_status(pull_request)
1452 1452 merge_check.merge_possible = merge_status
1453 1453 merge_check.merge_msg = msg
1454 1454 if not merge_status:
1455 1455 log.debug(
1456 1456 "MergeCheck: cannot merge, pull request merge not possible.")
1457 1457 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1458 1458
1459 1459 if fail_early:
1460 1460 return merge_check
1461 1461
1462 1462 return merge_check
1463 1463
1464 1464
1465 1465 ChangeTuple = namedtuple('ChangeTuple',
1466 1466 ['added', 'common', 'removed', 'total'])
1467 1467
1468 1468 FileChangeTuple = namedtuple('FileChangeTuple',
1469 1469 ['added', 'modified', 'removed'])