reviewers: only require a review when we have reviewers defined....
milka - r4561:c0ecf0a3 default
@@ -1,2230 +1,2233 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use in previewing the creation of a pull-request.
93 93 This is also used for the default reviewers logic
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit
113 113 # for diff generation; this logic gives the proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 _compare_data = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message"]
161 161 )
162 162
163 163 for commit in _compare_data:
164 164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on the data returned
165 165 # from this function, which is later passed through JSON serialization
166 166 serialized_commit = dict(
167 167 author=commit.author,
168 168 date=commit.date,
169 169 message=commit.message,
170 170 commit_id=commit.raw_id,
171 171 raw_id=commit.raw_id
172 172 )
173 173 commits.append(serialized_commit)
174 174 user = User.get_from_cs_author(serialized_commit['author'])
175 175 if user and user not in commit_authors:
176 176 commit_authors.append(user)
177 177
178 178 # lines
179 179 if get_authors:
180 180 log.debug('Calculating authors of changed files')
181 181 target_commit = source_repo.get_commit(ancestor_id)
182 182
183 183 for fname, lines in changed_lines.items():
184 184
185 185 try:
186 186 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 187 except Exception:
188 188 log.exception("Failed to load node with path %s", fname)
189 189 continue
190 190
191 191 if not isinstance(node, FileNode):
192 192 continue
193 193
194 194 # NOTE(marcink): for binary node we don't do annotation, just use last author
195 195 if node.is_binary:
196 196 author = node.last_commit.author
197 197 email = node.last_commit.author_email
198 198
199 199 user = User.get_from_cs_author(author)
200 200 if user:
201 201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 202 author_counts[author] = author_counts.get(author, 0) + 1
203 203 email_counts[email] = email_counts.get(email, 0) + 1
204 204
205 205 continue
206 206
207 207 for annotation in node.annotate:
208 208 line_no, commit_id, get_commit_func, line_text = annotation
209 209 if line_no in lines:
210 210 if commit_id not in _commit_cache:
211 211 _commit_cache[commit_id] = get_commit_func()
212 212 commit = _commit_cache[commit_id]
213 213 author = commit.author
214 214 email = commit.author_email
215 215 user = User.get_from_cs_author(author)
216 216 if user:
217 217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 218 author_counts[author] = author_counts.get(author, 0) + 1
219 219 email_counts[email] = email_counts.get(email, 0) + 1
220 220
221 221 log.debug('Default reviewers processing finished')
222 222
223 223 return {
224 224 'commits': commits,
225 225 'files': all_files_changes,
226 226 'stats': stats,
227 227 'ancestor': ancestor_id,
228 228 # original authors of modified files
229 229 'original_authors': {
230 230 'users': user_counts,
231 231 'authors': author_counts,
232 232 'emails': email_counts,
233 233 },
234 234 'commit_authors': commit_authors
235 235 }
236 236
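For orientation, a minimal usage sketch (not part of the original module) of how the dictionary returned by get_diff_info above could be consumed when previewing a pull request; the repository objects and refs are assumed to be supplied by the caller.

def summarize_diff_info(source_repo, source_ref, target_repo, target_ref):
    # call the helper above; get_authors=True also fills the per-file author stats
    diff_info = get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=True)
    added_lines, deleted_lines = diff_info['stats']
    return {
        'num_commits': len(diff_info['commits']),
        'num_files': len(diff_info['files']),
        'lines': '+{}/-{}'.format(added_lines, deleted_lines),
        'ancestor': diff_info['ancestor'],
        'commit_authors': [u.username for u in diff_info['commit_authors']],
    }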
237 237
238 238 class PullRequestModel(BaseModel):
239 239
240 240 cls = PullRequest
241 241
242 242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243 243
244 244 UPDATE_STATUS_MESSAGES = {
245 245 UpdateFailureReason.NONE: lazy_ugettext(
246 246 'Pull request update successful.'),
247 247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 248 'Pull request update failed because of an unknown error.'),
249 249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 250 'No update needed because the source and target have not changed.'),
251 251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 252 'Pull request cannot be updated because the reference type is '
253 253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 255 'This pull request cannot be updated because the target '
256 256 'reference is missing.'),
257 257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 258 'This pull request cannot be updated because the source '
259 259 'reference is missing.'),
260 260 }
261 261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
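A small illustrative sketch (an assumed helper, not in the original file) that gathers the permission checks defined above into one dict, e.g. for building a UI context; pull_request and user are the usual model objects.

def pull_request_permissions(pull_request, user):
    model = PullRequestModel()
    return {
        'can_read': model.check_user_read(pull_request, user),
        'can_comment': model.check_user_comment(pull_request, user),
        'can_update': model.check_user_update(pull_request, user),
        'can_merge': model.check_user_merge(pull_request, user),
        'can_change_status': model.check_user_change_status(pull_request, user),
        'is_reviewer': model.is_user_reviewer(pull_request, user),
    }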
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'id': PullRequest.pull_request_id,
352 352 'title': PullRequest.title,
353 353 'updated_on_raw': PullRequest.updated_on,
354 354 'target_repo': PullRequest.target_repo_id
355 355 }
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
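A hedged sketch of driving the listing API above with the pagination and ordering parameters described in the docstring; the repository name and page size are placeholders.

def list_pull_requests_page(repo_name, page=0, page_size=20):
    model = PullRequestModel()
    total = model.count_all(repo_name)
    pull_requests = model.get_all(
        repo_name, offset=page * page_size, length=page_size,
        order_by='updated_on_raw', order_dir='desc')
    return total, pull_requests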
408 408 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
409 409 opened_by=None):
410 410 """
411 411 Count the number of pull requests for a specific repository that are
412 412 awaiting review.
413 413
414 414 :param repo_name: target or source repo
415 415 :param search_q: filter by text
416 416 :param source: boolean flag to specify if repo_name refers to source
417 417 :param statuses: list of pull request statuses
418 418 :param opened_by: author user of the pull request
419 419 :returns: int number of pull requests
420 420 """
421 421 pull_requests = self.get_awaiting_review(
422 422 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
423 423
424 424 return len(pull_requests)
425 425
426 426 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
427 427 opened_by=None, offset=0, length=None,
428 428 order_by=None, order_dir='desc'):
429 429 """
430 430 Get all pull requests for a specific repository that are awaiting
431 431 review.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param offset: pagination offset
439 439 :param length: length of returned list
440 440 :param order_by: order of the returned list
441 441 :param order_dir: 'asc' or 'desc' ordering direction
442 442 :returns: list of pull requests
443 443 """
444 444 pull_requests = self.get_all(
445 445 repo_name, search_q=search_q, source=source, statuses=statuses,
446 446 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
447 447
448 448 _filtered_pull_requests = []
449 449 for pr in pull_requests:
450 450 status = pr.calculated_review_status()
451 451 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
452 452 ChangesetStatus.STATUS_UNDER_REVIEW]:
453 453 _filtered_pull_requests.append(pr)
454 454 if length:
455 455 return _filtered_pull_requests[offset:offset+length]
456 456 else:
457 457 return _filtered_pull_requests
458 458
459 459 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
460 460 opened_by=None, user_id=None):
461 461 """
462 462 Count the number of pull requests for a specific repository that are
463 463 awaiting review from a specific user.
464 464
465 465 :param repo_name: target or source repo
466 466 :param search_q: filter by text
467 467 :param source: boolean flag to specify if repo_name refers to source
468 468 :param statuses: list of pull request statuses
469 469 :param opened_by: author user of the pull request
470 470 :param user_id: reviewer user of the pull request
471 471 :returns: int number of pull requests
472 472 """
473 473 pull_requests = self.get_awaiting_my_review(
474 474 repo_name, search_q=search_q, source=source, statuses=statuses,
475 475 opened_by=opened_by, user_id=user_id)
476 476
477 477 return len(pull_requests)
478 478
479 479 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
480 480 opened_by=None, user_id=None, offset=0,
481 481 length=None, order_by=None, order_dir='desc'):
482 482 """
483 483 Get all pull requests for a specific repository that are awaiting
484 484 review from a specific user.
485 485
486 486 :param repo_name: target or source repo
487 487 :param search_q: filter by text
488 488 :param source: boolean flag to specify if repo_name refers to source
489 489 :param statuses: list of pull request statuses
490 490 :param opened_by: author user of the pull request
491 491 :param user_id: reviewer user of the pull request
492 492 :param offset: pagination offset
493 493 :param length: length of returned list
494 494 :param order_by: order of the returned list
495 495 :param order_dir: 'asc' or 'desc' ordering direction
496 496 :returns: list of pull requests
497 497 """
498 498 pull_requests = self.get_all(
499 499 repo_name, search_q=search_q, source=source, statuses=statuses,
500 500 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
501 501
502 502 _my = PullRequestModel().get_not_reviewed(user_id)
503 503 my_participation = []
504 504 for pr in pull_requests:
505 505 if pr in _my:
506 506 my_participation.append(pr)
507 507 _filtered_pull_requests = my_participation
508 508 if length:
509 509 return _filtered_pull_requests[offset:offset+length]
510 510 else:
511 511 return _filtered_pull_requests
512 512
513 513 def get_not_reviewed(self, user_id):
514 514 return [
515 515 x.pull_request for x in PullRequestReviewers.query().filter(
516 516 PullRequestReviewers.user_id == user_id).all()
517 517 ]
518 518
519 519 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
520 520 order_by=None, order_dir='desc'):
521 521 q = PullRequest.query()
522 522 if user_id:
523 523 reviewers_subquery = Session().query(
524 524 PullRequestReviewers.pull_request_id).filter(
525 525 PullRequestReviewers.user_id == user_id).subquery()
526 526 user_filter = or_(
527 527 PullRequest.user_id == user_id,
528 528 PullRequest.pull_request_id.in_(reviewers_subquery)
529 529 )
530 530 q = PullRequest.query().filter(user_filter)
531 531
532 532 # closed,opened
533 533 if statuses:
534 534 q = q.filter(PullRequest.status.in_(statuses))
535 535
536 536 if query:
537 537 like_expression = u'%{}%'.format(safe_unicode(query))
538 538 q = q.join(User)
539 539 q = q.filter(or_(
540 540 cast(PullRequest.pull_request_id, String).ilike(like_expression),
541 541 User.username.ilike(like_expression),
542 542 PullRequest.title.ilike(like_expression),
543 543 PullRequest.description.ilike(like_expression),
544 544 ))
545 545 if order_by:
546 546 order_map = {
547 547 'name_raw': PullRequest.pull_request_id,
548 548 'title': PullRequest.title,
549 549 'updated_on_raw': PullRequest.updated_on,
550 550 'target_repo': PullRequest.target_repo_id
551 551 }
552 552 if order_dir == 'asc':
553 553 q = q.order_by(order_map[order_by].asc())
554 554 else:
555 555 q = q.order_by(order_map[order_by].desc())
556 556
557 557 return q
558 558
559 559 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
560 560 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
561 561 return q.count()
562 562
563 563 def get_im_participating_in(
564 564 self, user_id=None, statuses=None, query='', offset=0,
565 565 length=None, order_by=None, order_dir='desc'):
566 566 """
567 567 Get all pull requests that I'm participating in, or have opened
568 568 """
569 569
570 570 q = self._prepare_participating_query(
571 571 user_id, statuses=statuses, query=query, order_by=order_by,
572 572 order_dir=order_dir)
573 573
574 574 if length:
575 575 pull_requests = q.limit(length).offset(offset).all()
576 576 else:
577 577 pull_requests = q.all()
578 578
579 579 return pull_requests
580 580
581 581 def get_versions(self, pull_request):
582 582 """
583 583 returns versions of the pull request sorted by version ID, ascending
584 584 """
585 585 return PullRequestVersion.query()\
586 586 .filter(PullRequestVersion.pull_request == pull_request)\
587 587 .order_by(PullRequestVersion.pull_request_version_id.asc())\
588 588 .all()
589 589
590 590 def get_pr_version(self, pull_request_id, version=None):
591 591 at_version = None
592 592
593 593 if version and version == 'latest':
594 594 pull_request_ver = PullRequest.get(pull_request_id)
595 595 pull_request_obj = pull_request_ver
596 596 _org_pull_request_obj = pull_request_obj
597 597 at_version = 'latest'
598 598 elif version:
599 599 pull_request_ver = PullRequestVersion.get_or_404(version)
600 600 pull_request_obj = pull_request_ver
601 601 _org_pull_request_obj = pull_request_ver.pull_request
602 602 at_version = pull_request_ver.pull_request_version_id
603 603 else:
604 604 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
605 605 pull_request_id)
606 606
607 607 pull_request_display_obj = PullRequest.get_pr_display_object(
608 608 pull_request_obj, _org_pull_request_obj)
609 609
610 610 return _org_pull_request_obj, pull_request_obj, \
611 611 pull_request_display_obj, at_version
612 612
613 613 def create(self, created_by, source_repo, source_ref, target_repo,
614 614 target_ref, revisions, reviewers, observers, title, description=None,
615 615 common_ancestor_id=None,
616 616 description_renderer=None,
617 617 reviewer_data=None, translator=None, auth_user=None):
618 618 translator = translator or get_current_request().translate
619 619
620 620 created_by_user = self._get_user(created_by)
621 621 auth_user = auth_user or created_by_user.AuthUser()
622 622 source_repo = self._get_repo(source_repo)
623 623 target_repo = self._get_repo(target_repo)
624 624
625 625 pull_request = PullRequest()
626 626 pull_request.source_repo = source_repo
627 627 pull_request.source_ref = source_ref
628 628 pull_request.target_repo = target_repo
629 629 pull_request.target_ref = target_ref
630 630 pull_request.revisions = revisions
631 631 pull_request.title = title
632 632 pull_request.description = description
633 633 pull_request.description_renderer = description_renderer
634 634 pull_request.author = created_by_user
635 635 pull_request.reviewer_data = reviewer_data
636 636 pull_request.pull_request_state = pull_request.STATE_CREATING
637 637 pull_request.common_ancestor_id = common_ancestor_id
638 638
639 639 Session().add(pull_request)
640 640 Session().flush()
641 641
642 642 reviewer_ids = set()
643 643 # members / reviewers
644 644 for reviewer_object in reviewers:
645 645 user_id, reasons, mandatory, role, rules = reviewer_object
646 646 user = self._get_user(user_id)
647 647
648 648 # skip duplicates
649 649 if user.user_id in reviewer_ids:
650 650 continue
651 651
652 652 reviewer_ids.add(user.user_id)
653 653
654 654 reviewer = PullRequestReviewers()
655 655 reviewer.user = user
656 656 reviewer.pull_request = pull_request
657 657 reviewer.reasons = reasons
658 658 reviewer.mandatory = mandatory
659 659 reviewer.role = role
660 660
661 661 # NOTE(marcink): pick only first rule for now
662 662 rule_id = list(rules)[0] if rules else None
663 663 rule = RepoReviewRule.get(rule_id) if rule_id else None
664 664 if rule:
665 665 review_group = rule.user_group_vote_rule(user_id)
666 666 # we check if this particular reviewer is a member of a voting group
667 667 if review_group:
668 668 # NOTE(marcink):
669 669 # the user can be a member of more than one group, but we pick the first one,
670 670 # same as the default reviewers algo
671 671 review_group = review_group[0]
672 672
673 673 rule_data = {
674 674 'rule_name':
675 675 rule.review_rule_name,
676 676 'rule_user_group_entry_id':
677 677 review_group.repo_review_rule_users_group_id,
678 678 'rule_user_group_name':
679 679 review_group.users_group.users_group_name,
680 680 'rule_user_group_members':
681 681 [x.user.username for x in review_group.users_group.members],
682 682 'rule_user_group_members_id':
683 683 [x.user.user_id for x in review_group.users_group.members],
684 684 }
685 685 # e.g {'vote_rule': -1, 'mandatory': True}
686 686 rule_data.update(review_group.rule_data())
687 687
688 688 reviewer.rule_data = rule_data
689 689
690 690 Session().add(reviewer)
691 691 Session().flush()
692 692
693 693 for observer_object in observers:
694 694 user_id, reasons, mandatory, role, rules = observer_object
695 695 user = self._get_user(user_id)
696 696
697 697 # skip duplicates from reviewers
698 698 if user.user_id in reviewer_ids:
699 699 continue
700 700
701 701 #reviewer_ids.add(user.user_id)
702 702
703 703 observer = PullRequestReviewers()
704 704 observer.user = user
705 705 observer.pull_request = pull_request
706 706 observer.reasons = reasons
707 707 observer.mandatory = mandatory
708 708 observer.role = role
709 709
710 710 # NOTE(marcink): pick only first rule for now
711 711 rule_id = list(rules)[0] if rules else None
712 712 rule = RepoReviewRule.get(rule_id) if rule_id else None
713 713 if rule:
714 714 # TODO(marcink): do we need this for observers ??
715 715 pass
716 716
717 717 Session().add(observer)
718 718 Session().flush()
719 719
720 720 # Set approval status to "Under Review" for all commits which are
721 721 # part of this pull request.
722 722 ChangesetStatusModel().set_status(
723 723 repo=target_repo,
724 724 status=ChangesetStatus.STATUS_UNDER_REVIEW,
725 725 user=created_by_user,
726 726 pull_request=pull_request
727 727 )
728 728 # we commit early at this point. This has to do with the fact
729 729 # that the queries above do some row-locking. Because of that
730 730 # we need to commit and finish the transaction before the validate call below,
731 731 # which for large repos could take long and result in long row locks
732 732 Session().commit()
733 733
734 734 # prepare workspace, and run initial merge simulation. Set state during that
735 735 # operation
736 736 pull_request = PullRequest.get(pull_request.pull_request_id)
737 737
738 738 # set state to merging for the merge simulation, and when finished set it to created
739 739 # to mark that the simulation worked fine
740 740 with pull_request.set_state(PullRequest.STATE_MERGING,
741 741 final_state=PullRequest.STATE_CREATED) as state_obj:
742 742 MergeCheck.validate(
743 743 pull_request, auth_user=auth_user, translator=translator)
744 744
745 745 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
746 746 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
747 747
748 748 creation_data = pull_request.get_api_data(with_merge_state=False)
749 749 self._log_audit_action(
750 750 'repo.pull_request.create', {'data': creation_data},
751 751 auth_user, pull_request)
752 752
753 753 return pull_request
754 754
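A hedged sketch of calling create() above. The reviewer and observer entries follow the (user_id, reasons, mandatory, role, rules) tuples unpacked inside the method, and the refs use the 'type:name:commit_id' form seen elsewhere in this model; all ids and names here are placeholders.

def open_example_pull_request(created_by, source_repo, target_repo,
                              revisions, source_commit_id, target_commit_id):
    reviewers = [
        # (user_id, reasons, mandatory, role, rules)
        (2, ['default reviewer'], True, PullRequestReviewers.ROLE_REVIEWER, []),
    ]
    observers = [
        (3, ['team lead'], False, PullRequestReviewers.ROLE_OBSERVER, []),
    ]
    return PullRequestModel().create(
        created_by=created_by,
        source_repo=source_repo,
        source_ref='branch:feature:%s' % source_commit_id,
        target_repo=target_repo,
        target_ref='branch:default:%s' % target_commit_id,
        revisions=revisions,
        reviewers=reviewers,
        observers=observers,
        title='Example pull request',
        description='Illustration only')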
755 755 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
756 756 pull_request = self.__get_pull_request(pull_request)
757 757 target_scm = pull_request.target_repo.scm_instance()
758 758 if action == 'create':
759 759 trigger_hook = hooks_utils.trigger_create_pull_request_hook
760 760 elif action == 'merge':
761 761 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
762 762 elif action == 'close':
763 763 trigger_hook = hooks_utils.trigger_close_pull_request_hook
764 764 elif action == 'review_status_change':
765 765 trigger_hook = hooks_utils.trigger_review_pull_request_hook
766 766 elif action == 'update':
767 767 trigger_hook = hooks_utils.trigger_update_pull_request_hook
768 768 elif action == 'comment':
769 769 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
770 770 elif action == 'comment_edit':
771 771 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
772 772 else:
773 773 return
774 774
775 775 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
776 776 pull_request, action, trigger_hook)
777 777 trigger_hook(
778 778 username=user.username,
779 779 repo_name=pull_request.target_repo.repo_name,
780 780 repo_type=target_scm.alias,
781 781 pull_request=pull_request,
782 782 data=data)
783 783
784 784 def _get_commit_ids(self, pull_request):
785 785 """
786 786 Return the commit ids of the merged pull request.
787 787
788 788 This method does not yet deal correctly with the lack of autoupdates
789 789 nor with implicit target updates.
790 790 For example: if a commit in the source repo is already in the target, it
791 791 will be reported anyway.
792 792 """
793 793 merge_rev = pull_request.merge_rev
794 794 if merge_rev is None:
795 795 raise ValueError('This pull request was not merged yet')
796 796
797 797 commit_ids = list(pull_request.revisions)
798 798 if merge_rev not in commit_ids:
799 799 commit_ids.append(merge_rev)
800 800
801 801 return commit_ids
802 802
803 803 def merge_repo(self, pull_request, user, extras):
804 804 log.debug("Merging pull request %s", pull_request.pull_request_id)
805 805 extras['user_agent'] = 'internal-merge'
806 806 merge_state = self._merge_pull_request(pull_request, user, extras)
807 807 if merge_state.executed:
808 808 log.debug("Merge was successful, updating the pull request comments.")
809 809 self._comment_and_close_pr(pull_request, user, merge_state)
810 810
811 811 self._log_audit_action(
812 812 'repo.pull_request.merge',
813 813 {'merge_state': merge_state.__dict__},
814 814 user, pull_request)
815 815
816 816 else:
817 817 log.warn("Merge failed, not updating the pull request.")
818 818 return merge_state
819 819
820 820 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
821 821 target_vcs = pull_request.target_repo.scm_instance()
822 822 source_vcs = pull_request.source_repo.scm_instance()
823 823
824 824 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
825 825 pr_id=pull_request.pull_request_id,
826 826 pr_title=pull_request.title,
827 827 source_repo=source_vcs.name,
828 828 source_ref_name=pull_request.source_ref_parts.name,
829 829 target_repo=target_vcs.name,
830 830 target_ref_name=pull_request.target_ref_parts.name,
831 831 )
832 832
833 833 workspace_id = self._workspace_id(pull_request)
834 834 repo_id = pull_request.target_repo.repo_id
835 835 use_rebase = self._use_rebase_for_merging(pull_request)
836 836 close_branch = self._close_branch_before_merging(pull_request)
837 837 user_name = self._user_name_for_merging(pull_request, user)
838 838
839 839 target_ref = self._refresh_reference(
840 840 pull_request.target_ref_parts, target_vcs)
841 841
842 842 callback_daemon, extras = prepare_callback_daemon(
843 843 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
844 844 host=vcs_settings.HOOKS_HOST,
845 845 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
846 846
847 847 with callback_daemon:
848 848 # TODO: johbo: Implement a clean way to run a config_override
849 849 # for a single call.
850 850 target_vcs.config.set(
851 851 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
852 852
853 853 merge_state = target_vcs.merge(
854 854 repo_id, workspace_id, target_ref, source_vcs,
855 855 pull_request.source_ref_parts,
856 856 user_name=user_name, user_email=user.email,
857 857 message=message, use_rebase=use_rebase,
858 858 close_branch=close_branch)
859 859 return merge_state
860 860
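The merge commit message above is built with str.format placeholders (pr_id, pr_title, source_repo, source_ref_name, target_repo, target_ref_name). Below is a self-contained sketch with an example template of the same shape; the template text and all values are made up for illustration, not the actual MERGE_MESSAGE_TMPL.

example_merge_message_tmpl = (
    u'Merge pull request !{pr_id} from {source_repo} branch {source_ref_name} '
    u'into {target_repo} branch {target_ref_name}\n\n{pr_title}')

print(example_merge_message_tmpl.format(
    pr_id=42,
    pr_title=u'Fix login redirect',
    source_repo=u'example-repo-fork',
    source_ref_name=u'feature-branch',
    target_repo=u'example-repo',
    target_ref_name=u'default'))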
861 861 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
862 862 pull_request.merge_rev = merge_state.merge_ref.commit_id
863 863 pull_request.updated_on = datetime.datetime.now()
864 864 close_msg = close_msg or 'Pull request merged and closed'
865 865
866 866 CommentsModel().create(
867 867 text=safe_unicode(close_msg),
868 868 repo=pull_request.target_repo.repo_id,
869 869 user=user.user_id,
870 870 pull_request=pull_request.pull_request_id,
871 871 f_path=None,
872 872 line_no=None,
873 873 closing_pr=True
874 874 )
875 875
876 876 Session().add(pull_request)
877 877 Session().flush()
878 878 # TODO: paris: replace invalidation with less radical solution
879 879 ScmModel().mark_for_invalidation(
880 880 pull_request.target_repo.repo_name)
881 881 self.trigger_pull_request_hook(pull_request, user, 'merge')
882 882
883 883 def has_valid_update_type(self, pull_request):
884 884 source_ref_type = pull_request.source_ref_parts.type
885 885 return source_ref_type in self.REF_TYPES
886 886
887 887 def get_flow_commits(self, pull_request):
888 888
889 889 # source repo
890 890 source_ref_name = pull_request.source_ref_parts.name
891 891 source_ref_type = pull_request.source_ref_parts.type
892 892 source_ref_id = pull_request.source_ref_parts.commit_id
893 893 source_repo = pull_request.source_repo.scm_instance()
894 894
895 895 try:
896 896 if source_ref_type in self.REF_TYPES:
897 897 source_commit = source_repo.get_commit(source_ref_name)
898 898 else:
899 899 source_commit = source_repo.get_commit(source_ref_id)
900 900 except CommitDoesNotExistError:
901 901 raise SourceRefMissing()
902 902
903 903 # target repo
904 904 target_ref_name = pull_request.target_ref_parts.name
905 905 target_ref_type = pull_request.target_ref_parts.type
906 906 target_ref_id = pull_request.target_ref_parts.commit_id
907 907 target_repo = pull_request.target_repo.scm_instance()
908 908
909 909 try:
910 910 if target_ref_type in self.REF_TYPES:
911 911 target_commit = target_repo.get_commit(target_ref_name)
912 912 else:
913 913 target_commit = target_repo.get_commit(target_ref_id)
914 914 except CommitDoesNotExistError:
915 915 raise TargetRefMissing()
916 916
917 917 return source_commit, target_commit
918 918
919 919 def update_commits(self, pull_request, updating_user):
920 920 """
921 921 Get the updated list of commits for the pull request
922 922 and return the new pull request version and the list
923 923 of commits processed by this update action
924 924
925 925 updating_user is the user_object who triggered the update
926 926 """
927 927 pull_request = self.__get_pull_request(pull_request)
928 928 source_ref_type = pull_request.source_ref_parts.type
929 929 source_ref_name = pull_request.source_ref_parts.name
930 930 source_ref_id = pull_request.source_ref_parts.commit_id
931 931
932 932 target_ref_type = pull_request.target_ref_parts.type
933 933 target_ref_name = pull_request.target_ref_parts.name
934 934 target_ref_id = pull_request.target_ref_parts.commit_id
935 935
936 936 if not self.has_valid_update_type(pull_request):
937 937 log.debug("Skipping update of pull request %s due to ref type: %s",
938 938 pull_request, source_ref_type)
939 939 return UpdateResponse(
940 940 executed=False,
941 941 reason=UpdateFailureReason.WRONG_REF_TYPE,
942 942 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
943 943 source_changed=False, target_changed=False)
944 944
945 945 try:
946 946 source_commit, target_commit = self.get_flow_commits(pull_request)
947 947 except SourceRefMissing:
948 948 return UpdateResponse(
949 949 executed=False,
950 950 reason=UpdateFailureReason.MISSING_SOURCE_REF,
951 951 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
952 952 source_changed=False, target_changed=False)
953 953 except TargetRefMissing:
954 954 return UpdateResponse(
955 955 executed=False,
956 956 reason=UpdateFailureReason.MISSING_TARGET_REF,
957 957 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
958 958 source_changed=False, target_changed=False)
959 959
960 960 source_changed = source_ref_id != source_commit.raw_id
961 961 target_changed = target_ref_id != target_commit.raw_id
962 962
963 963 if not (source_changed or target_changed):
964 964 log.debug("Nothing changed in pull request %s", pull_request)
965 965 return UpdateResponse(
966 966 executed=False,
967 967 reason=UpdateFailureReason.NO_CHANGE,
968 968 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
969 969 source_changed=target_changed, target_changed=source_changed)
970 970
971 971 change_in_found = 'target repo' if target_changed else 'source repo'
972 972 log.debug('Updating pull request because of change in %s detected',
973 973 change_in_found)
974 974
975 975 # Finally there is a need for an update, in case of source change
976 976 # we create a new version, else just an update
977 977 if source_changed:
978 978 pull_request_version = self._create_version_from_snapshot(pull_request)
979 979 self._link_comments_to_version(pull_request_version)
980 980 else:
981 981 try:
982 982 ver = pull_request.versions[-1]
983 983 except IndexError:
984 984 ver = None
985 985
986 986 pull_request.pull_request_version_id = \
987 987 ver.pull_request_version_id if ver else None
988 988 pull_request_version = pull_request
989 989
990 990 source_repo = pull_request.source_repo.scm_instance()
991 991 target_repo = pull_request.target_repo.scm_instance()
992 992
993 993 # re-compute commit ids
994 994 old_commit_ids = pull_request.revisions
995 995 pre_load = ["author", "date", "message", "branch"]
996 996 commit_ranges = target_repo.compare(
997 997 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
998 998 pre_load=pre_load)
999 999
1000 1000 target_ref = target_commit.raw_id
1001 1001 source_ref = source_commit.raw_id
1002 1002 ancestor_commit_id = target_repo.get_common_ancestor(
1003 1003 target_ref, source_ref, source_repo)
1004 1004
1005 1005 if not ancestor_commit_id:
1006 1006 raise ValueError(
1007 1007 'cannot calculate diff info without a common ancestor. '
1008 1008 'Make sure both repositories are related, and have a common forking commit.')
1009 1009
1010 1010 pull_request.common_ancestor_id = ancestor_commit_id
1011 1011
1012 1012 pull_request.source_ref = '%s:%s:%s' % (
1013 1013 source_ref_type, source_ref_name, source_commit.raw_id)
1014 1014 pull_request.target_ref = '%s:%s:%s' % (
1015 1015 target_ref_type, target_ref_name, ancestor_commit_id)
1016 1016
1017 1017 pull_request.revisions = [
1018 1018 commit.raw_id for commit in reversed(commit_ranges)]
1019 1019 pull_request.updated_on = datetime.datetime.now()
1020 1020 Session().add(pull_request)
1021 1021 new_commit_ids = pull_request.revisions
1022 1022
1023 1023 old_diff_data, new_diff_data = self._generate_update_diffs(
1024 1024 pull_request, pull_request_version)
1025 1025
1026 1026 # calculate commit and file changes
1027 1027 commit_changes = self._calculate_commit_id_changes(
1028 1028 old_commit_ids, new_commit_ids)
1029 1029 file_changes = self._calculate_file_changes(
1030 1030 old_diff_data, new_diff_data)
1031 1031
1032 1032 # set comments as outdated if DIFFS changed
1033 1033 CommentsModel().outdate_comments(
1034 1034 pull_request, old_diff_data=old_diff_data,
1035 1035 new_diff_data=new_diff_data)
1036 1036
1037 1037 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1038 1038 file_node_changes = (
1039 1039 file_changes.added or file_changes.modified or file_changes.removed)
1040 1040 pr_has_changes = valid_commit_changes or file_node_changes
1041 1041
1042 1042 # Add an automatic comment to the pull request, in case
1043 1043 # anything has changed
1044 1044 if pr_has_changes:
1045 1045 update_comment = CommentsModel().create(
1046 1046 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1047 1047 repo=pull_request.target_repo,
1048 1048 user=pull_request.author,
1049 1049 pull_request=pull_request,
1050 1050 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1051 1051
1052 1052 # Update status to "Under Review" for added commits
1053 1053 for commit_id in commit_changes.added:
1054 1054 ChangesetStatusModel().set_status(
1055 1055 repo=pull_request.source_repo,
1056 1056 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1057 1057 comment=update_comment,
1058 1058 user=pull_request.author,
1059 1059 pull_request=pull_request,
1060 1060 revision=commit_id)
1061 1061
1062 1062 # send update email to users
1063 1063 try:
1064 1064 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1065 1065 ancestor_commit_id=ancestor_commit_id,
1066 1066 commit_changes=commit_changes,
1067 1067 file_changes=file_changes)
1068 1068 except Exception:
1069 1069 log.exception('Failed to send email notification to users')
1070 1070
1071 1071 log.debug(
1072 1072 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1073 1073 'removed_ids: %s', pull_request.pull_request_id,
1074 1074 commit_changes.added, commit_changes.common, commit_changes.removed)
1075 1075 log.debug(
1076 1076 'Updated pull request with the following file changes: %s',
1077 1077 file_changes)
1078 1078
1079 1079 log.info(
1080 1080 "Updated pull request %s from commit %s to commit %s, "
1081 1081 "stored new version %s of this pull request.",
1082 1082 pull_request.pull_request_id, source_ref_id,
1083 1083 pull_request.source_ref_parts.commit_id,
1084 1084 pull_request_version.pull_request_version_id)
1085 1085 Session().commit()
1086 1086 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1087 1087
1088 1088 return UpdateResponse(
1089 1089 executed=True, reason=UpdateFailureReason.NONE,
1090 1090 old=pull_request, new=pull_request_version,
1091 1091 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1092 1092 source_changed=source_changed, target_changed=target_changed)
1093 1093
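A hedged sketch of driving update_commits above and reporting the outcome via the UpdateResponse fields and the UPDATE_STATUS_MESSAGES mapping defined earlier in this class.

def refresh_pull_request(pull_request, updating_user):
    resp = PullRequestModel().update_commits(pull_request, updating_user)
    if resp.executed:
        log.debug('pull request updated, added: %s, removed: %s',
                  resp.changes.added, resp.changes.removed)
    else:
        log.debug('pull request not updated: %s',
                  PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason])
    return resp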
1094 1094 def _create_version_from_snapshot(self, pull_request):
1095 1095 version = PullRequestVersion()
1096 1096 version.title = pull_request.title
1097 1097 version.description = pull_request.description
1098 1098 version.status = pull_request.status
1099 1099 version.pull_request_state = pull_request.pull_request_state
1100 1100 version.created_on = datetime.datetime.now()
1101 1101 version.updated_on = pull_request.updated_on
1102 1102 version.user_id = pull_request.user_id
1103 1103 version.source_repo = pull_request.source_repo
1104 1104 version.source_ref = pull_request.source_ref
1105 1105 version.target_repo = pull_request.target_repo
1106 1106 version.target_ref = pull_request.target_ref
1107 1107
1108 1108 version._last_merge_source_rev = pull_request._last_merge_source_rev
1109 1109 version._last_merge_target_rev = pull_request._last_merge_target_rev
1110 1110 version.last_merge_status = pull_request.last_merge_status
1111 1111 version.last_merge_metadata = pull_request.last_merge_metadata
1112 1112 version.shadow_merge_ref = pull_request.shadow_merge_ref
1113 1113 version.merge_rev = pull_request.merge_rev
1114 1114 version.reviewer_data = pull_request.reviewer_data
1115 1115
1116 1116 version.revisions = pull_request.revisions
1117 1117 version.common_ancestor_id = pull_request.common_ancestor_id
1118 1118 version.pull_request = pull_request
1119 1119 Session().add(version)
1120 1120 Session().flush()
1121 1121
1122 1122 return version
1123 1123
1124 1124 def _generate_update_diffs(self, pull_request, pull_request_version):
1125 1125
1126 1126 diff_context = (
1127 1127 self.DIFF_CONTEXT +
1128 1128 CommentsModel.needed_extra_diff_context())
1129 1129 hide_whitespace_changes = False
1130 1130 source_repo = pull_request_version.source_repo
1131 1131 source_ref_id = pull_request_version.source_ref_parts.commit_id
1132 1132 target_ref_id = pull_request_version.target_ref_parts.commit_id
1133 1133 old_diff = self._get_diff_from_pr_or_version(
1134 1134 source_repo, source_ref_id, target_ref_id,
1135 1135 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1136 1136
1137 1137 source_repo = pull_request.source_repo
1138 1138 source_ref_id = pull_request.source_ref_parts.commit_id
1139 1139 target_ref_id = pull_request.target_ref_parts.commit_id
1140 1140
1141 1141 new_diff = self._get_diff_from_pr_or_version(
1142 1142 source_repo, source_ref_id, target_ref_id,
1143 1143 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1144 1144
1145 1145 old_diff_data = diffs.DiffProcessor(old_diff)
1146 1146 old_diff_data.prepare()
1147 1147 new_diff_data = diffs.DiffProcessor(new_diff)
1148 1148 new_diff_data.prepare()
1149 1149
1150 1150 return old_diff_data, new_diff_data
1151 1151
1152 1152 def _link_comments_to_version(self, pull_request_version):
1153 1153 """
1154 1154 Link all unlinked comments of this pull request to the given version.
1155 1155
1156 1156 :param pull_request_version: The `PullRequestVersion` to which
1157 1157 the comments shall be linked.
1158 1158
1159 1159 """
1160 1160 pull_request = pull_request_version.pull_request
1161 1161 comments = ChangesetComment.query()\
1162 1162 .filter(
1163 1163 # TODO: johbo: Should we query for the repo at all here?
1164 1164 # Pending decision on how comments of PRs are to be related
1165 1165 # to either the source repo, the target repo or no repo at all.
1166 1166 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1167 1167 ChangesetComment.pull_request == pull_request,
1168 1168 ChangesetComment.pull_request_version == None)\
1169 1169 .order_by(ChangesetComment.comment_id.asc())
1170 1170
1171 1171 # TODO: johbo: Find out why this breaks if it is done in a bulk
1172 1172 # operation.
1173 1173 for comment in comments:
1174 1174 comment.pull_request_version_id = (
1175 1175 pull_request_version.pull_request_version_id)
1176 1176 Session().add(comment)
1177 1177
1178 1178 def _calculate_commit_id_changes(self, old_ids, new_ids):
1179 1179 added = [x for x in new_ids if x not in old_ids]
1180 1180 common = [x for x in new_ids if x in old_ids]
1181 1181 removed = [x for x in old_ids if x not in new_ids]
1182 1182 total = new_ids
1183 1183 return ChangeTuple(added, common, removed, total)
1184 1184
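A self-contained worked example of the added/common/removed classification performed above, using made-up commit ids.

old_ids = ['aaa1', 'bbb2', 'ccc3']
new_ids = ['bbb2', 'ccc3', 'ddd4']

added = [x for x in new_ids if x not in old_ids]    # ['ddd4']
common = [x for x in new_ids if x in old_ids]       # ['bbb2', 'ccc3']
removed = [x for x in old_ids if x not in new_ids]  # ['aaa1']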
1185 1185 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1186 1186
1187 1187 old_files = OrderedDict()
1188 1188 for diff_data in old_diff_data.parsed_diff:
1189 1189 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1190 1190
1191 1191 added_files = []
1192 1192 modified_files = []
1193 1193 removed_files = []
1194 1194 for diff_data in new_diff_data.parsed_diff:
1195 1195 new_filename = diff_data['filename']
1196 1196 new_hash = md5_safe(diff_data['raw_diff'])
1197 1197
1198 1198 old_hash = old_files.get(new_filename)
1199 1199 if not old_hash:
1200 1200 # the file is not present in the old diff, so we have to figure out the
1201 1201 # ADD/REMOVE operation from the parsed diff
1202 1202 operations_dict = diff_data['stats']['ops']
1203 1203 if diffs.DEL_FILENODE in operations_dict:
1204 1204 removed_files.append(new_filename)
1205 1205 else:
1206 1206 added_files.append(new_filename)
1207 1207 else:
1208 1208 if new_hash != old_hash:
1209 1209 modified_files.append(new_filename)
1210 1210 # now remove a file from old, since we have seen it already
1211 1211 del old_files[new_filename]
1212 1212
1213 1213 # removed files are those present in the old diff but not in the NEW one;
1214 1214 # since we remove old files that are present in the new diff, any left-overs
1215 1215 # are the removed files
1216 1216 removed_files.extend(old_files.keys())
1217 1217
1218 1218 return FileChangeTuple(added_files, modified_files, removed_files)
1219 1219
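A self-contained sketch of the same md5-based classification using plain dicts of filename to raw diff text; for brevity it treats every file missing from the old diff as added, ignoring the DEL_FILENODE special case handled above.

import hashlib

def classify_files(old_raw_diffs, new_raw_diffs):
    def md5(text):
        return hashlib.md5(text.encode('utf-8')).hexdigest()

    old_hashes = dict((fname, md5(diff)) for fname, diff in old_raw_diffs.items())
    added, modified = [], []
    for fname, diff in new_raw_diffs.items():
        old_hash = old_hashes.pop(fname, None)
        if old_hash is None:
            added.append(fname)
        elif md5(diff) != old_hash:
            modified.append(fname)
    # anything left over was present in the old diff only, i.e. removed
    removed = list(old_hashes)
    return added, modified, removed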
1220 1220 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1221 1221 """
1222 1222 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1223 1223 so it always looks the same regardless of which default
1224 1224 renderer the system is using.
1225 1225
1226 1226 :param ancestor_commit_id: ancestor raw_id
1227 1227 :param changes: changes named tuple
1228 1228 :param file_changes: file changes named tuple
1229 1229
1230 1230 """
1231 1231 new_status = ChangesetStatus.get_status_lbl(
1232 1232 ChangesetStatus.STATUS_UNDER_REVIEW)
1233 1233
1234 1234 changed_files = (
1235 1235 file_changes.added + file_changes.modified + file_changes.removed)
1236 1236
1237 1237 params = {
1238 1238 'under_review_label': new_status,
1239 1239 'added_commits': changes.added,
1240 1240 'removed_commits': changes.removed,
1241 1241 'changed_files': changed_files,
1242 1242 'added_files': file_changes.added,
1243 1243 'modified_files': file_changes.modified,
1244 1244 'removed_files': file_changes.removed,
1245 1245 'ancestor_commit_id': ancestor_commit_id
1246 1246 }
1247 1247 renderer = RstTemplateRenderer()
1248 1248 return renderer.render('pull_request_update.mako', **params)
1249 1249
1250 1250 def edit(self, pull_request, title, description, description_renderer, user):
1251 1251 pull_request = self.__get_pull_request(pull_request)
1252 1252 old_data = pull_request.get_api_data(with_merge_state=False)
1253 1253 if pull_request.is_closed():
1254 1254 raise ValueError('This pull request is closed')
1255 1255 if title:
1256 1256 pull_request.title = title
1257 1257 pull_request.description = description
1258 1258 pull_request.updated_on = datetime.datetime.now()
1259 1259 pull_request.description_renderer = description_renderer
1260 1260 Session().add(pull_request)
1261 1261 self._log_audit_action(
1262 1262 'repo.pull_request.edit', {'old_data': old_data},
1263 1263 user, pull_request)
1264 1264
1265 1265 def update_reviewers(self, pull_request, reviewer_data, user):
1266 1266 """
1267 1267 Update the reviewers in the pull request
1268 1268
1269 1269 :param pull_request: the pr to update
1270 1270 :param reviewer_data: list of tuples
1271 1271 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1272 1272 :param user: current user who triggers this action
1273 1273 """
1274 1274
1275 1275 pull_request = self.__get_pull_request(pull_request)
1276 1276 if pull_request.is_closed():
1277 1277 raise ValueError('This pull request is closed')
1278 1278
1279 1279 reviewers = {}
1280 1280 for user_id, reasons, mandatory, role, rules in reviewer_data:
1281 1281 if isinstance(user_id, (int, compat.string_types)):
1282 1282 user_id = self._get_user(user_id).user_id
1283 1283 reviewers[user_id] = {
1284 1284 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1285 1285
1286 1286 reviewers_ids = set(reviewers.keys())
1287 1287 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1288 1288 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1289 1289
1290 1290 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1291 1291
1292 1292 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1293 1293 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1294 1294
1295 1295 log.debug("Adding %s reviewers", ids_to_add)
1296 1296 log.debug("Removing %s reviewers", ids_to_remove)
1297 1297 changed = False
1298 1298 added_audit_reviewers = []
1299 1299 removed_audit_reviewers = []
1300 1300
1301 1301 for uid in ids_to_add:
1302 1302 changed = True
1303 1303 _usr = self._get_user(uid)
1304 1304 reviewer = PullRequestReviewers()
1305 1305 reviewer.user = _usr
1306 1306 reviewer.pull_request = pull_request
1307 1307 reviewer.reasons = reviewers[uid]['reasons']
1308 1308 # NOTE(marcink): mandatory shouldn't be changed now
1309 1309 # reviewer.mandatory = reviewers[uid]['reasons']
1310 1310 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1311 1311 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1312 1312 Session().add(reviewer)
1313 1313 added_audit_reviewers.append(reviewer.get_dict())
1314 1314
1315 1315 for uid in ids_to_remove:
1316 1316 changed = True
1317 1317 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1318 1318 # This is an edge case that handles previous state of having the same reviewer twice.
1319 1319 # this CAN happen due to the lack of DB checks
1320 1320 reviewers = PullRequestReviewers.query()\
1321 1321 .filter(PullRequestReviewers.user_id == uid,
1322 1322 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1323 1323 PullRequestReviewers.pull_request == pull_request)\
1324 1324 .all()
1325 1325
1326 1326 for obj in reviewers:
1327 1327 removed_audit_reviewers.append(obj.get_dict())
1328 1328 Session().delete(obj)
1329 1329
1330 1330 if changed:
1331 1331 Session().expire_all()
1332 1332 pull_request.updated_on = datetime.datetime.now()
1333 1333 Session().add(pull_request)
1334 1334
1335 1335 # finally store audit logs
1336 1336 for user_data in added_audit_reviewers:
1337 1337 self._log_audit_action(
1338 1338 'repo.pull_request.reviewer.add', {'data': user_data},
1339 1339 user, pull_request)
1340 1340 for user_data in removed_audit_reviewers:
1341 1341 self._log_audit_action(
1342 1342 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1343 1343 user, pull_request)
1344 1344
1345 1345 self.notify_reviewers(pull_request, ids_to_add, user)
1346 1346 return ids_to_add, ids_to_remove
1347 1347
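A brief sketch of calling update_reviewers above; the entry follows the (user, reasons, mandatory, role, rules) layout given in the docstring, with placeholder values.

def set_single_reviewer(pull_request, reviewer_user_id, acting_user):
    reviewer_data = [
        (reviewer_user_id, ['requested by author'], False,
         PullRequestReviewers.ROLE_REVIEWER, []),
    ]
    ids_added, ids_removed = PullRequestModel().update_reviewers(
        pull_request, reviewer_data, acting_user)
    return ids_added, ids_removed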
1348 1348 def update_observers(self, pull_request, observer_data, user):
1349 1349 """
1350 1350 Update the observers in the pull request
1351 1351
1352 1352 :param pull_request: the pr to update
1353 1353 :param observer_data: list of tuples
1354 1354 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1355 1355 :param user: current user who triggers this action
1356 1356 """
1357 1357 pull_request = self.__get_pull_request(pull_request)
1358 1358 if pull_request.is_closed():
1359 1359 raise ValueError('This pull request is closed')
1360 1360
1361 1361 observers = {}
1362 1362 for user_id, reasons, mandatory, role, rules in observer_data:
1363 1363 if isinstance(user_id, (int, compat.string_types)):
1364 1364 user_id = self._get_user(user_id).user_id
1365 1365 observers[user_id] = {
1366 1366 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1367 1367
1368 1368 observers_ids = set(observers.keys())
1369 1369 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1370 1370 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1371 1371
1372 1372 current_observers_ids = set([x.user.user_id for x in current_observers])
1373 1373
1374 1374 ids_to_add = observers_ids.difference(current_observers_ids)
1375 1375 ids_to_remove = current_observers_ids.difference(observers_ids)
1376 1376
1377 1377 log.debug("Adding %s observer", ids_to_add)
1378 1378 log.debug("Removing %s observer", ids_to_remove)
1379 1379 changed = False
1380 1380 added_audit_observers = []
1381 1381 removed_audit_observers = []
1382 1382
1383 1383 for uid in ids_to_add:
1384 1384 changed = True
1385 1385 _usr = self._get_user(uid)
1386 1386 observer = PullRequestReviewers()
1387 1387 observer.user = _usr
1388 1388 observer.pull_request = pull_request
1389 1389 observer.reasons = observers[uid]['reasons']
1390 1390 # NOTE(marcink): mandatory shouldn't be changed now
1391 1391 # observer.mandatory = observers[uid]['mandatory']
1392 1392
1393 1393 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1394 1394 observer.role = PullRequestReviewers.ROLE_OBSERVER
1395 1395 Session().add(observer)
1396 1396 added_audit_observers.append(observer.get_dict())
1397 1397
1398 1398 for uid in ids_to_remove:
1399 1399 changed = True
1400 1400 # NOTE(marcink): we fetch ALL observer objects using .all().
1401 1401 # This handles the edge case of the same observer being present twice,
1402 1402 # which CAN happen due to the lack of DB constraints.
1403 1403 observers = PullRequestReviewers.query()\
1404 1404 .filter(PullRequestReviewers.user_id == uid,
1405 1405 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1406 1406 PullRequestReviewers.pull_request == pull_request)\
1407 1407 .all()
1408 1408
1409 1409 for obj in observers:
1410 1410 removed_audit_observers.append(obj.get_dict())
1411 1411 Session().delete(obj)
1412 1412
1413 1413 if changed:
1414 1414 Session().expire_all()
1415 1415 pull_request.updated_on = datetime.datetime.now()
1416 1416 Session().add(pull_request)
1417 1417
1418 1418 # finally store audit logs
1419 1419 for user_data in added_audit_observers:
1420 1420 self._log_audit_action(
1421 1421 'repo.pull_request.observer.add', {'data': user_data},
1422 1422 user, pull_request)
1423 1423 for user_data in removed_audit_observers:
1424 1424 self._log_audit_action(
1425 1425 'repo.pull_request.observer.delete', {'old_data': user_data},
1426 1426 user, pull_request)
1427 1427
1428 1428 self.notify_observers(pull_request, ids_to_add, user)
1429 1429 return ids_to_add, ids_to_remove
1430 1430
1431 1431 def get_url(self, pull_request, request=None, permalink=False):
1432 1432 if not request:
1433 1433 request = get_current_request()
1434 1434
1435 1435 if permalink:
1436 1436 return request.route_url(
1437 1437 'pull_requests_global',
1438 1438 pull_request_id=pull_request.pull_request_id,)
1439 1439 else:
1440 1440 return request.route_url('pullrequest_show',
1441 1441 repo_name=safe_str(pull_request.target_repo.repo_name),
1442 1442 pull_request_id=pull_request.pull_request_id,)
1443 1443
1444 1444 def get_shadow_clone_url(self, pull_request, request=None):
1445 1445 """
1446 1446 Returns a qualified URL pointing to the shadow repository. If this pull
1447 1447 request is closed there is no shadow repository and ``None`` is
1448 1448 returned.
1449 1449 """
1450 1450 if pull_request.is_closed():
1451 1451 return None
1452 1452 else:
1453 1453 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1454 1454 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1455 1455
1456 1456 def _notify_reviewers(self, pull_request, user_ids, role, user):
1457 1457 # notification to reviewers/observers
1458 1458 if not user_ids:
1459 1459 return
1460 1460
1461 1461 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1462 1462
1463 1463 pull_request_obj = pull_request
1464 1464 # get the current participants of this pull request
1465 1465 recipients = user_ids
1466 1466 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1467 1467
1468 1468 pr_source_repo = pull_request_obj.source_repo
1469 1469 pr_target_repo = pull_request_obj.target_repo
1470 1470
1471 1471 pr_url = h.route_url('pullrequest_show',
1472 1472 repo_name=pr_target_repo.repo_name,
1473 1473 pull_request_id=pull_request_obj.pull_request_id,)
1474 1474
1475 1475 # set some variables for email notification
1476 1476 pr_target_repo_url = h.route_url(
1477 1477 'repo_summary', repo_name=pr_target_repo.repo_name)
1478 1478
1479 1479 pr_source_repo_url = h.route_url(
1480 1480 'repo_summary', repo_name=pr_source_repo.repo_name)
1481 1481
1482 1482 # pull request specifics
1483 1483 pull_request_commits = [
1484 1484 (x.raw_id, x.message)
1485 1485 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1486 1486
1487 1487 current_rhodecode_user = user
1488 1488 kwargs = {
1489 1489 'user': current_rhodecode_user,
1490 1490 'pull_request_author': pull_request.author,
1491 1491 'pull_request': pull_request_obj,
1492 1492 'pull_request_commits': pull_request_commits,
1493 1493
1494 1494 'pull_request_target_repo': pr_target_repo,
1495 1495 'pull_request_target_repo_url': pr_target_repo_url,
1496 1496
1497 1497 'pull_request_source_repo': pr_source_repo,
1498 1498 'pull_request_source_repo_url': pr_source_repo_url,
1499 1499
1500 1500 'pull_request_url': pr_url,
1501 1501 'thread_ids': [pr_url],
1502 1502 'user_role': role
1503 1503 }
1504 1504
1505 1505 # create notification objects, and emails
1506 1506 NotificationModel().create(
1507 1507 created_by=current_rhodecode_user,
1508 1508 notification_subject='', # Filled in based on the notification_type
1509 1509 notification_body='', # Filled in based on the notification_type
1510 1510 notification_type=notification_type,
1511 1511 recipients=recipients,
1512 1512 email_kwargs=kwargs,
1513 1513 )
1514 1514
1515 1515 def notify_reviewers(self, pull_request, reviewers_ids, user):
1516 1516 return self._notify_reviewers(pull_request, reviewers_ids,
1517 1517 PullRequestReviewers.ROLE_REVIEWER, user)
1518 1518
1519 1519 def notify_observers(self, pull_request, observers_ids, user):
1520 1520 return self._notify_reviewers(pull_request, observers_ids,
1521 1521 PullRequestReviewers.ROLE_OBSERVER, user)
1522 1522
1523 1523 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1524 1524 commit_changes, file_changes):
1525 1525
1526 1526 updating_user_id = updating_user.user_id
1527 1527 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1528 1528 # NOTE(marcink): send notification to all other users except to
1529 1529 # person who updated the PR
1530 1530 recipients = reviewers.difference(set([updating_user_id]))
1531 1531
1532 1532 log.debug('Notify following recipients about pull-request update %s', recipients)
1533 1533
1534 1534 pull_request_obj = pull_request
1535 1535
1536 1536 # send email about the update
1537 1537 changed_files = (
1538 1538 file_changes.added + file_changes.modified + file_changes.removed)
1539 1539
1540 1540 pr_source_repo = pull_request_obj.source_repo
1541 1541 pr_target_repo = pull_request_obj.target_repo
1542 1542
1543 1543 pr_url = h.route_url('pullrequest_show',
1544 1544 repo_name=pr_target_repo.repo_name,
1545 1545 pull_request_id=pull_request_obj.pull_request_id,)
1546 1546
1547 1547 # set some variables for email notification
1548 1548 pr_target_repo_url = h.route_url(
1549 1549 'repo_summary', repo_name=pr_target_repo.repo_name)
1550 1550
1551 1551 pr_source_repo_url = h.route_url(
1552 1552 'repo_summary', repo_name=pr_source_repo.repo_name)
1553 1553
1554 1554 email_kwargs = {
1555 1555 'date': datetime.datetime.now(),
1556 1556 'updating_user': updating_user,
1557 1557
1558 1558 'pull_request': pull_request_obj,
1559 1559
1560 1560 'pull_request_target_repo': pr_target_repo,
1561 1561 'pull_request_target_repo_url': pr_target_repo_url,
1562 1562
1563 1563 'pull_request_source_repo': pr_source_repo,
1564 1564 'pull_request_source_repo_url': pr_source_repo_url,
1565 1565
1566 1566 'pull_request_url': pr_url,
1567 1567
1568 1568 'ancestor_commit_id': ancestor_commit_id,
1569 1569 'added_commits': commit_changes.added,
1570 1570 'removed_commits': commit_changes.removed,
1571 1571 'changed_files': changed_files,
1572 1572 'added_files': file_changes.added,
1573 1573 'modified_files': file_changes.modified,
1574 1574 'removed_files': file_changes.removed,
1575 1575 'thread_ids': [pr_url],
1576 1576 }
1577 1577
1578 1578 # create notification objects, and emails
1579 1579 NotificationModel().create(
1580 1580 created_by=updating_user,
1581 1581 notification_subject='', # Filled in based on the notification_type
1582 1582 notification_body='', # Filled in based on the notification_type
1583 1583 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1584 1584 recipients=recipients,
1585 1585 email_kwargs=email_kwargs,
1586 1586 )
1587 1587
1588 1588 def delete(self, pull_request, user=None):
1589 1589 if not user:
1590 1590 user = getattr(get_current_rhodecode_user(), 'username', None)
1591 1591
1592 1592 pull_request = self.__get_pull_request(pull_request)
1593 1593 old_data = pull_request.get_api_data(with_merge_state=False)
1594 1594 self._cleanup_merge_workspace(pull_request)
1595 1595 self._log_audit_action(
1596 1596 'repo.pull_request.delete', {'old_data': old_data},
1597 1597 user, pull_request)
1598 1598 Session().delete(pull_request)
1599 1599
1600 1600 def close_pull_request(self, pull_request, user):
1601 1601 pull_request = self.__get_pull_request(pull_request)
1602 1602 self._cleanup_merge_workspace(pull_request)
1603 1603 pull_request.status = PullRequest.STATUS_CLOSED
1604 1604 pull_request.updated_on = datetime.datetime.now()
1605 1605 Session().add(pull_request)
1606 1606 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1607 1607
1608 1608 pr_data = pull_request.get_api_data(with_merge_state=False)
1609 1609 self._log_audit_action(
1610 1610 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1611 1611
1612 1612 def close_pull_request_with_comment(
1613 1613 self, pull_request, user, repo, message=None, auth_user=None):
1614 1614
1615 1615 pull_request_review_status = pull_request.calculated_review_status()
1616 1616
1617 1617 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1618 1618 # approved only if we have voting consent
1619 1619 status = ChangesetStatus.STATUS_APPROVED
1620 1620 else:
1621 1621 status = ChangesetStatus.STATUS_REJECTED
1622 1622 status_lbl = ChangesetStatus.get_status_lbl(status)
1623 1623
1624 1624 default_message = (
1625 1625 'Closing with status change {transition_icon} {status}.'
1626 1626 ).format(transition_icon='>', status=status_lbl)
1627 1627 text = message or default_message
1628 1628
1629 1629 # create a comment, and link it to new status
1630 1630 comment = CommentsModel().create(
1631 1631 text=text,
1632 1632 repo=repo.repo_id,
1633 1633 user=user.user_id,
1634 1634 pull_request=pull_request.pull_request_id,
1635 1635 status_change=status_lbl,
1636 1636 status_change_type=status,
1637 1637 closing_pr=True,
1638 1638 auth_user=auth_user,
1639 1639 )
1640 1640
1641 1641 # calculate old status before we change it
1642 1642 old_calculated_status = pull_request.calculated_review_status()
1643 1643 ChangesetStatusModel().set_status(
1644 1644 repo.repo_id,
1645 1645 status,
1646 1646 user.user_id,
1647 1647 comment=comment,
1648 1648 pull_request=pull_request.pull_request_id
1649 1649 )
1650 1650
1651 1651 Session().flush()
1652 1652
1653 1653 self.trigger_pull_request_hook(pull_request, user, 'comment',
1654 1654 data={'comment': comment})
1655 1655
1656 1656 # we now calculate the status of the pull request again, and based on
1657 1657 # that calculation trigger a status change. This can happen when a
1658 1658 # non-reviewer admin closes a PR: their vote doesn't change the status,
1659 1659 # while a reviewer's vote might.
1660 1660 calculated_status = pull_request.calculated_review_status()
1661 1661 if old_calculated_status != calculated_status:
1662 1662 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1663 1663 data={'status': calculated_status})
1664 1664
1665 1665 # finally close the PR
1666 1666 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1667 1667
1668 1668 return comment, status
1669 1669
1670 1670 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1671 1671 _ = translator or get_current_request().translate
1672 1672
1673 1673 if not self._is_merge_enabled(pull_request):
1674 1674 return None, False, _('Server-side pull request merging is disabled.')
1675 1675
1676 1676 if pull_request.is_closed():
1677 1677 return None, False, _('This pull request is closed.')
1678 1678
1679 1679 merge_possible, msg = self._check_repo_requirements(
1680 1680 target=pull_request.target_repo, source=pull_request.source_repo,
1681 1681 translator=_)
1682 1682 if not merge_possible:
1683 1683 return None, merge_possible, msg
1684 1684
1685 1685 try:
1686 1686 merge_response = self._try_merge(
1687 1687 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1688 1688 log.debug("Merge response: %s", merge_response)
1689 1689 return merge_response, merge_response.possible, merge_response.merge_status_message
1690 1690 except NotImplementedError:
1691 1691 return None, False, _('Pull request merging is not supported.')
1692 1692
1693 1693 def _check_repo_requirements(self, target, source, translator):
1694 1694 """
1695 1695 Check if `target` and `source` have compatible requirements.
1696 1696
1697 1697 Currently this is just checking for largefiles.
1698 1698 """
1699 1699 _ = translator
1700 1700 target_has_largefiles = self._has_largefiles(target)
1701 1701 source_has_largefiles = self._has_largefiles(source)
1702 1702 merge_possible = True
1703 1703 message = u''
1704 1704
1705 1705 if target_has_largefiles != source_has_largefiles:
1706 1706 merge_possible = False
1707 1707 if source_has_largefiles:
1708 1708 message = _(
1709 1709 'Target repository large files support is disabled.')
1710 1710 else:
1711 1711 message = _(
1712 1712 'Source repository large files support is disabled.')
1713 1713
1714 1714 return merge_possible, message
1715 1715
1716 1716 def _has_largefiles(self, repo):
1717 1717 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1718 1718 'extensions', 'largefiles')
1719 1719 return largefiles_ui and largefiles_ui[0].active
1720 1720
1721 1721 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1722 1722 """
1723 1723 Try to merge the pull request and return the merge status.
1724 1724 """
1725 1725 log.debug(
1726 1726 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1727 1727 pull_request.pull_request_id, force_shadow_repo_refresh)
1728 1728 target_vcs = pull_request.target_repo.scm_instance()
1729 1729 # Refresh the target reference.
1730 1730 try:
1731 1731 target_ref = self._refresh_reference(
1732 1732 pull_request.target_ref_parts, target_vcs)
1733 1733 except CommitDoesNotExistError:
1734 1734 merge_state = MergeResponse(
1735 1735 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1736 1736 metadata={'target_ref': pull_request.target_ref_parts})
1737 1737 return merge_state
1738 1738
1739 1739 target_locked = pull_request.target_repo.locked
1740 1740 if target_locked and target_locked[0]:
1741 1741 locked_by = 'user:{}'.format(target_locked[0])
1742 1742 log.debug("The target repository is locked by %s.", locked_by)
1743 1743 merge_state = MergeResponse(
1744 1744 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1745 1745 metadata={'locked_by': locked_by})
1746 1746 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1747 1747 pull_request, target_ref):
1748 1748 log.debug("Refreshing the merge status of the repository.")
1749 1749 merge_state = self._refresh_merge_state(
1750 1750 pull_request, target_vcs, target_ref)
1751 1751 else:
1752 1752 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1753 1753 metadata = {
1754 1754 'unresolved_files': '',
1755 1755 'target_ref': pull_request.target_ref_parts,
1756 1756 'source_ref': pull_request.source_ref_parts,
1757 1757 }
1758 1758 if pull_request.last_merge_metadata:
1759 1759 metadata.update(pull_request.last_merge_metadata_parsed)
1760 1760
1761 1761 if not possible and target_ref.type == 'branch':
1762 1762 # NOTE(marcink): case for mercurial multiple heads on branch
1763 1763 heads = target_vcs._heads(target_ref.name)
1764 1764 if len(heads) != 1:
1765 1765 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1766 1766 metadata.update({
1767 1767 'heads': heads
1768 1768 })
1769 1769
1770 1770 merge_state = MergeResponse(
1771 1771 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1772 1772
1773 1773 return merge_state
1774 1774
1775 1775 def _refresh_reference(self, reference, vcs_repository):
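# References whose type is in UPDATABLE_REF_TYPES (e.g. branches/bookmarks)
# are re-resolved by name to pick up new commits; other reference types are
# resolved by their recorded commit id.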
1776 1776 if reference.type in self.UPDATABLE_REF_TYPES:
1777 1777 name_or_id = reference.name
1778 1778 else:
1779 1779 name_or_id = reference.commit_id
1780 1780
1781 1781 refreshed_commit = vcs_repository.get_commit(name_or_id)
1782 1782 refreshed_reference = Reference(
1783 1783 reference.type, reference.name, refreshed_commit.raw_id)
1784 1784 return refreshed_reference
1785 1785
1786 1786 def _needs_merge_state_refresh(self, pull_request, target_reference):
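# A refresh is needed unless both the recorded source revision (revisions[0])
# and the target reference commit still match the state saved during the
# last merge simulation.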
1787 1787 return not (
1788 1788 pull_request.revisions and
1789 1789 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1790 1790 target_reference.commit_id == pull_request._last_merge_target_rev)
1791 1791
1792 1792 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1793 1793 workspace_id = self._workspace_id(pull_request)
1794 1794 source_vcs = pull_request.source_repo.scm_instance()
1795 1795 repo_id = pull_request.target_repo.repo_id
1796 1796 use_rebase = self._use_rebase_for_merging(pull_request)
1797 1797 close_branch = self._close_branch_before_merging(pull_request)
1798 1798 merge_state = target_vcs.merge(
1799 1799 repo_id, workspace_id,
1800 1800 target_reference, source_vcs, pull_request.source_ref_parts,
1801 1801 dry_run=True, use_rebase=use_rebase,
1802 1802 close_branch=close_branch)
1803 1803
1804 1804 # Do not store the response if there was an unknown error.
1805 1805 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1806 1806 pull_request._last_merge_source_rev = \
1807 1807 pull_request.source_ref_parts.commit_id
1808 1808 pull_request._last_merge_target_rev = target_reference.commit_id
1809 1809 pull_request.last_merge_status = merge_state.failure_reason
1810 1810 pull_request.last_merge_metadata = merge_state.metadata
1811 1811
1812 1812 pull_request.shadow_merge_ref = merge_state.merge_ref
1813 1813 Session().add(pull_request)
1814 1814 Session().commit()
1815 1815
1816 1816 return merge_state
1817 1817
1818 1818 def _workspace_id(self, pull_request):
1819 1819 workspace_id = 'pr-%s' % pull_request.pull_request_id
1820 1820 return workspace_id
1821 1821
1822 1822 def generate_repo_data(self, repo, commit_id=None, branch=None,
1823 1823 bookmark=None, translator=None):
1824 1824 from rhodecode.model.repo import RepoModel
1825 1825
1826 1826 all_refs, selected_ref = \
1827 1827 self._get_repo_pullrequest_sources(
1828 1828 repo.scm_instance(), commit_id=commit_id,
1829 1829 branch=branch, bookmark=bookmark, translator=translator)
1830 1830
1831 1831 refs_select2 = []
1832 1832 for element in all_refs:
1833 1833 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1834 1834 refs_select2.append({'text': element[1], 'children': children})
1835 1835
1836 1836 return {
1837 1837 'user': {
1838 1838 'user_id': repo.user.user_id,
1839 1839 'username': repo.user.username,
1840 1840 'firstname': repo.user.first_name,
1841 1841 'lastname': repo.user.last_name,
1842 1842 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1843 1843 },
1844 1844 'name': repo.repo_name,
1845 1845 'link': RepoModel().get_url(repo),
1846 1846 'description': h.chop_at_smart(repo.description_safe, '\n'),
1847 1847 'refs': {
1848 1848 'all_refs': all_refs,
1849 1849 'selected_ref': selected_ref,
1850 1850 'select2_refs': refs_select2
1851 1851 }
1852 1852 }
1853 1853
1854 1854 def generate_pullrequest_title(self, source, source_ref, target):
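# e.g. generate_pullrequest_title('my-repo', 'feature-1', 'my-repo-fork')
#      -> u'my-repo#feature-1 to my-repo-fork'  (illustrative values)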
1855 1855 return u'{source}#{at_ref} to {target}'.format(
1856 1856 source=source,
1857 1857 at_ref=source_ref,
1858 1858 target=target,
1859 1859 )
1860 1860
1861 1861 def _cleanup_merge_workspace(self, pull_request):
1862 1862 # Merging related cleanup
1863 1863 repo_id = pull_request.target_repo.repo_id
1864 1864 target_scm = pull_request.target_repo.scm_instance()
1865 1865 workspace_id = self._workspace_id(pull_request)
1866 1866
1867 1867 try:
1868 1868 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1869 1869 except NotImplementedError:
1870 1870 pass
1871 1871
1872 1872 def _get_repo_pullrequest_sources(
1873 1873 self, repo, commit_id=None, branch=None, bookmark=None,
1874 1874 translator=None):
1875 1875 """
1876 1876 Return a structure with the repo's interesting commits, suitable for
1877 1877 the selectors in the pull request controller
1878 1878
1879 1879 :param commit_id: a commit that must be present in one of the sources
1880 1880 and selected by default
1881 1881 :param branch: a branch that must be in the list and selected
1882 1882 by default - even if closed
1883 1883 :param bookmark: a bookmark that must be in the list and selected
1884 1884 """
1885 1885 _ = translator or get_current_request().translate
1886 1886
1887 1887 commit_id = safe_str(commit_id) if commit_id else None
1888 1888 branch = safe_unicode(branch) if branch else None
1889 1889 bookmark = safe_unicode(bookmark) if bookmark else None
1890 1890
1891 1891 selected = None
1892 1892
1893 1893 # order matters: first source that has commit_id in it will be selected
1894 1894 sources = []
1895 1895 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1896 1896 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1897 1897
1898 1898 if commit_id:
1899 1899 ref_commit = (h.short_id(commit_id), commit_id)
1900 1900 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1901 1901
1902 1902 sources.append(
1903 1903 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1904 1904 )
1905 1905
1906 1906 groups = []
1907 1907
1908 1908 for group_key, ref_list, group_name, match in sources:
1909 1909 group_refs = []
1910 1910 for ref_name, ref_id in ref_list:
1911 1911 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1912 1912 group_refs.append((ref_key, ref_name))
1913 1913
1914 1914 if not selected:
1915 1915 if set([commit_id, match]) & set([ref_id, ref_name]):
1916 1916 selected = ref_key
1917 1917
1918 1918 if group_refs:
1919 1919 groups.append((group_refs, group_name))
1920 1920
1921 1921 if not selected:
1922 1922 ref = commit_id or branch or bookmark
1923 1923 if ref:
1924 1924 raise CommitDoesNotExistError(
1925 1925 u'No commit refs could be found matching: {}'.format(ref))
1926 1926 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1927 1927 selected = u'branch:{}:{}'.format(
1928 1928 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1929 1929 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1930 1930 )
1931 1931 elif repo.commit_ids:
1932 1932 # make the user select in this case
1933 1933 selected = None
1934 1934 else:
1935 1935 raise EmptyRepositoryError()
1936 1936 return groups, selected
1937 1937
1938 1938 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1939 1939 hide_whitespace_changes, diff_context):
1940 1940
1941 1941 return self._get_diff_from_pr_or_version(
1942 1942 source_repo, source_ref_id, target_ref_id,
1943 1943 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1944 1944
1945 1945 def _get_diff_from_pr_or_version(
1946 1946 self, source_repo, source_ref_id, target_ref_id,
1947 1947 hide_whitespace_changes, diff_context):
1948 1948
1949 1949 target_commit = source_repo.get_commit(
1950 1950 commit_id=safe_str(target_ref_id))
1951 1951 source_commit = source_repo.get_commit(
1952 1952 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1953 1953 if isinstance(source_repo, Repository):
1954 1954 vcs_repo = source_repo.scm_instance()
1955 1955 else:
1956 1956 vcs_repo = source_repo
1957 1957
1958 1958 # TODO: johbo: In the context of an update, we cannot reach
1959 1959 # the old commit anymore with our normal mechanisms. It needs
1960 1960 # some sort of special support in the vcs layer to avoid this
1961 1961 # workaround.
1962 1962 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1963 1963 vcs_repo.alias == 'git'):
1964 1964 source_commit.raw_id = safe_str(source_ref_id)
1965 1965
1966 1966 log.debug('calculating diff between '
1967 1967 'source_ref:%s and target_ref:%s for repo `%s`',
1968 1968 source_ref_id, target_ref_id,
1969 1969 safe_unicode(vcs_repo.path))
1970 1970
1971 1971 vcs_diff = vcs_repo.get_diff(
1972 1972 commit1=target_commit, commit2=source_commit,
1973 1973 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1974 1974 return vcs_diff
1975 1975
1976 1976 def _is_merge_enabled(self, pull_request):
1977 1977 return self._get_general_setting(
1978 1978 pull_request, 'rhodecode_pr_merge_enabled')
1979 1979
1980 1980 def _use_rebase_for_merging(self, pull_request):
1981 1981 repo_type = pull_request.target_repo.repo_type
1982 1982 if repo_type == 'hg':
1983 1983 return self._get_general_setting(
1984 1984 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1985 1985 elif repo_type == 'git':
1986 1986 return self._get_general_setting(
1987 1987 pull_request, 'rhodecode_git_use_rebase_for_merging')
1988 1988
1989 1989 return False
1990 1990
1991 1991 def _user_name_for_merging(self, pull_request, user):
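# If the RC_MERGE_USER_NAME_ATTR environment variable names an attribute that
# exists on the user object (e.g. 'username'), that attribute is used for the
# merge user name; otherwise 'short_contact' is used.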
1992 1992 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1993 1993 if env_user_name_attr and hasattr(user, env_user_name_attr):
1994 1994 user_name_attr = env_user_name_attr
1995 1995 else:
1996 1996 user_name_attr = 'short_contact'
1997 1997
1998 1998 user_name = getattr(user, user_name_attr)
1999 1999 return user_name
2000 2000
2001 2001 def _close_branch_before_merging(self, pull_request):
2002 2002 repo_type = pull_request.target_repo.repo_type
2003 2003 if repo_type == 'hg':
2004 2004 return self._get_general_setting(
2005 2005 pull_request, 'rhodecode_hg_close_branch_before_merging')
2006 2006 elif repo_type == 'git':
2007 2007 return self._get_general_setting(
2008 2008 pull_request, 'rhodecode_git_close_branch_before_merging')
2009 2009
2010 2010 return False
2011 2011
2012 2012 def _get_general_setting(self, pull_request, settings_key, default=False):
2013 2013 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2014 2014 settings = settings_model.get_general_settings()
2015 2015 return settings.get(settings_key, default)
2016 2016
2017 2017 def _log_audit_action(self, action, action_data, user, pull_request):
2018 2018 audit_logger.store(
2019 2019 action=action,
2020 2020 action_data=action_data,
2021 2021 user=user,
2022 2022 repo=pull_request.target_repo)
2023 2023
2024 2024 def get_reviewer_functions(self):
2025 2025 """
2026 2026 Fetches functions for validating and fetching default reviewers.
2027 2027 If available we use the EE package, else we fall back to the CE
2028 2028 package functions.
2029 2029 """
2030 2030 try:
2031 2031 from rc_reviewers.utils import get_default_reviewers_data
2032 2032 from rc_reviewers.utils import validate_default_reviewers
2033 2033 from rc_reviewers.utils import validate_observers
2034 2034 except ImportError:
2035 2035 from rhodecode.apps.repository.utils import get_default_reviewers_data
2036 2036 from rhodecode.apps.repository.utils import validate_default_reviewers
2037 2037 from rhodecode.apps.repository.utils import validate_observers
2038 2038
2039 2039 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2040 2040
2041 2041
2042 2042 class MergeCheck(object):
2043 2043 """
2044 2044 Performs merge checks and returns a check object which stores information
2045 2045 about merge errors and merge conditions.
2046 2046 """
2047 2047 TODO_CHECK = 'todo'
2048 2048 PERM_CHECK = 'perm'
2049 2049 REVIEW_CHECK = 'review'
2050 2050 MERGE_CHECK = 'merge'
2051 2051 WIP_CHECK = 'wip'
2052 2052
2053 2053 def __init__(self):
2054 2054 self.review_status = None
2055 2055 self.merge_possible = None
2056 2056 self.merge_msg = ''
2057 2057 self.merge_response = None
2058 2058 self.failed = None
2059 2059 self.errors = []
2060 2060 self.error_details = OrderedDict()
2061 2061 self.source_commit = AttributeDict()
2062 2062 self.target_commit = AttributeDict()
2063 self.reviewers_count = 0
2064 self.observers_count = 0
2063 2065
2064 2066 def __repr__(self):
2065 2067 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2066 2068 self.merge_possible, self.failed, self.errors)
2067 2069
2068 2070 def push_error(self, error_type, message, error_key, details):
2069 2071 self.failed = True
2070 2072 self.errors.append([error_type, message])
2071 2073 self.error_details[error_key] = dict(
2072 2074 details=details,
2073 2075 error_type=error_type,
2074 2076 message=message
2075 2077 )
2076 2078
2077 2079 @classmethod
2078 2080 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2079 2081 force_shadow_repo_refresh=False):
2080 2082 _ = translator
2081 2083 merge_check = cls()
2082 2084
2083 2085 # title has WIP:
2084 2086 if pull_request.work_in_progress:
2085 2087 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2086 2088
2088 2090 msg = _('WIP marker in title prevents an accidental merge.')
2088 2090 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2089 2091 if fail_early:
2090 2092 return merge_check
2091 2093
2092 2094 # permissions to merge
2093 2095 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2094 2096 if not user_allowed_to_merge:
2095 2097 log.debug("MergeCheck: cannot merge, approval is pending.")
2096 2098
2097 2099 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2098 2100 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2099 2101 if fail_early:
2100 2102 return merge_check
2101 2103
2102 2104 # permission to merge into the target branch
2103 2105 target_commit_id = pull_request.target_ref_parts.commit_id
2104 2106 if pull_request.target_ref_parts.type == 'branch':
2105 2107 branch_name = pull_request.target_ref_parts.name
2106 2108 else:
2107 2109 # for mercurial we can always figure out the branch from the commit
2108 2110 # in case of bookmark
2109 2111 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2110 2112 branch_name = target_commit.branch
2111 2113
2112 2114 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2113 2115 pull_request.target_repo.repo_name, branch_name)
2114 2116 if branch_perm and branch_perm == 'branch.none':
2115 2117 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2116 2118 branch_name, rule)
2117 2119 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2118 2120 if fail_early:
2119 2121 return merge_check
2120 2122
2121 2123 # review status, must be always present
2122 2124 review_status = pull_request.calculated_review_status()
2123 2125 merge_check.review_status = review_status
2126 merge_check.reviewers_count = pull_request.reviewers_count
2127 merge_check.observers_count = pull_request.observers_count
2124 2128
2125 2129 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2126 if not status_approved:
2130 if not status_approved and merge_check.reviewers_count:
2127 2131 log.debug("MergeCheck: cannot merge, approval is pending.")
2128
2129 2132 msg = _('Pull request reviewer approval is pending.')
2130 2133
2131 2134 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2132 2135
2133 2136 if fail_early:
2134 2137 return merge_check
2135 2138
2136 2139 # left over TODOs
2137 2140 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2138 2141 if todos:
2139 2142 log.debug("MergeCheck: cannot merge, {} "
2140 2143 "unresolved TODOs left.".format(len(todos)))
2141 2144
2142 2145 if len(todos) == 1:
2143 2146 msg = _('Cannot merge, {} TODO still not resolved.').format(
2144 2147 len(todos))
2145 2148 else:
2146 2149 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2147 2150 len(todos))
2148 2151
2149 2152 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2150 2153
2151 2154 if fail_early:
2152 2155 return merge_check
2153 2156
2154 2157 # merge possible, here is the filesystem simulation + shadow repo
2155 2158 merge_response, merge_status, msg = PullRequestModel().merge_status(
2156 2159 pull_request, translator=translator,
2157 2160 force_shadow_repo_refresh=force_shadow_repo_refresh)
2158 2161
2159 2162 merge_check.merge_possible = merge_status
2160 2163 merge_check.merge_msg = msg
2161 2164 merge_check.merge_response = merge_response
2162 2165
2163 2166 source_ref_id = pull_request.source_ref_parts.commit_id
2164 2167 target_ref_id = pull_request.target_ref_parts.commit_id
2165 2168
2166 2169 try:
2167 2170 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2168 2171 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2169 2172 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2170 2173 merge_check.source_commit.current_raw_id = source_commit.raw_id
2171 2174 merge_check.source_commit.previous_raw_id = source_ref_id
2172 2175
2173 2176 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2174 2177 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2175 2178 merge_check.target_commit.current_raw_id = target_commit.raw_id
2176 2179 merge_check.target_commit.previous_raw_id = target_ref_id
2177 2180 except (SourceRefMissing, TargetRefMissing):
2178 2181 pass
2179 2182
2180 2183 if not merge_status:
2181 2184 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2182 2185 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2183 2186
2184 2187 if fail_early:
2185 2188 return merge_check
2186 2189
2187 2190 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2188 2191 return merge_check
2189 2192
2190 2193 @classmethod
2191 2194 def get_merge_conditions(cls, pull_request, translator):
2192 2195 _ = translator
2193 2196 merge_details = {}
2194 2197
2195 2198 model = PullRequestModel()
2196 2199 use_rebase = model._use_rebase_for_merging(pull_request)
2197 2200
2198 2201 if use_rebase:
2199 2202 merge_details['merge_strategy'] = dict(
2200 2203 details={},
2201 2204 message=_('Merge strategy: rebase')
2202 2205 )
2203 2206 else:
2204 2207 merge_details['merge_strategy'] = dict(
2205 2208 details={},
2206 2209 message=_('Merge strategy: explicit merge commit')
2207 2210 )
2208 2211
2209 2212 close_branch = model._close_branch_before_merging(pull_request)
2210 2213 if close_branch:
2211 2214 repo_type = pull_request.target_repo.repo_type
2212 2215 close_msg = ''
2213 2216 if repo_type == 'hg':
2214 2217 close_msg = _('Source branch will be closed before the merge.')
2215 2218 elif repo_type == 'git':
2216 2219 close_msg = _('Source branch will be deleted after the merge.')
2217 2220
2218 2221 merge_details['close_branch'] = dict(
2219 2222 details={},
2220 2223 message=close_msg
2221 2224 )
2222 2225
2223 2226 return merge_details
2224 2227
2225 2228
2226 2229 ChangeTuple = collections.namedtuple(
2227 2230 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2228 2231
2229 2232 FileChangeTuple = collections.namedtuple(
2230 2233 'FileChangeTuple', ['added', 'modified', 'removed'])