default-reviewers: diff data should load more things lazily for better performance.
marcink
r4508:2365f5e7 stable
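get_diff_info already splits its work behind the get_authors and get_commit_authors flags; this commit widens the pre_load list passed to compare() and adds debug logging around the expensive steps, so callers that only need the diff preview stay cheap. A minimal usage sketch, assuming source_repo/source_ref/target_repo/target_ref are already resolved (these call sites are not part of this hunk):

# hedged sketch, not part of the commit: the repos are assumed to be
# scm-backed Repository objects, the refs commit ids or branch names

# cheap call: commits plus per-file stats, enough for the pull-request preview
diff_info = get_diff_info(
    source_repo, source_ref, target_repo, target_ref,
    get_authors=False, get_commit_authors=True)

# expensive call: additionally annotates every changed line to count the
# original authors, which the default-reviewers rules can consume
reviewer_info = get_diff_info(
    source_repo, source_ref, target_repo, target_ref,
    get_authors=True, get_commit_authors=True)
# reviewer_info['original_authors'] -> {'users': ..., 'authors': ..., 'emails': ...}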
@@ -1,2201 +1,2205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull request.
93 93 This is also used for the default reviewers logic.
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives a proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 log.debug('Obtaining commit authors from set of commits')
157 158 commits = target_scm.compare(
158 159 target_ref, source_ref, source_scm, merge=True,
159 pre_load=["author"])
160 pre_load=["author", "date", "message", "branch", "parents"])
160 161
161 162 for commit in commits:
162 163 user = User.get_from_cs_author(commit.author)
163 164 if user and user not in commit_authors:
164 165 commit_authors.append(user)
165 166
166 167 # lines
167 168 if get_authors:
169 log.debug('Calculating authors of changed files')
168 170 target_commit = source_repo.get_commit(ancestor_id)
169 171
170 172 for fname, lines in changed_lines.items():
171 173 try:
172 174 node = target_commit.get_node(fname)
173 175 except Exception:
174 176 continue
175 177
176 178 if not isinstance(node, FileNode):
177 179 continue
178 180
179 181 for annotation in node.annotate:
180 182 line_no, commit_id, get_commit_func, line_text = annotation
181 183 if line_no in lines:
182 184 if commit_id not in _commit_cache:
183 185 _commit_cache[commit_id] = get_commit_func()
184 186 commit = _commit_cache[commit_id]
185 187 author = commit.author
186 188 email = commit.author_email
187 189 user = User.get_from_cs_author(author)
188 190 if user:
189 191 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
190 192 author_counts[author] = author_counts.get(author, 0) + 1
191 193 email_counts[email] = email_counts.get(email, 0) + 1
192 194
195 log.debug('Default reviewers processing finished')
196
193 197 return {
194 198 'commits': commits,
195 199 'files': all_files_changes,
196 200 'stats': stats,
197 201 'ancestor': ancestor_id,
198 202 # original authors of modified files
199 203 'original_authors': {
200 204 'users': user_counts,
201 205 'authors': author_counts,
202 206 'emails': email_counts,
203 207 },
204 208 'commit_authors': commit_authors
205 209 }
206 210
207 211
208 212 class PullRequestModel(BaseModel):
209 213
210 214 cls = PullRequest
211 215
212 216 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
213 217
214 218 UPDATE_STATUS_MESSAGES = {
215 219 UpdateFailureReason.NONE: lazy_ugettext(
216 220 'Pull request update successful.'),
217 221 UpdateFailureReason.UNKNOWN: lazy_ugettext(
218 222 'Pull request update failed because of an unknown error.'),
219 223 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
220 224 'No update needed because the source and target have not changed.'),
221 225 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
222 226 'Pull request cannot be updated because the reference type is '
223 227 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
224 228 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
225 229 'This pull request cannot be updated because the target '
226 230 'reference is missing.'),
227 231 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
228 232 'This pull request cannot be updated because the source '
229 233 'reference is missing.'),
230 234 }
231 235 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
232 236 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233 237
234 238 def __get_pull_request(self, pull_request):
235 239 return self._get_instance((
236 240 PullRequest, PullRequestVersion), pull_request)
237 241
238 242 def _check_perms(self, perms, pull_request, user, api=False):
239 243 if not api:
240 244 return h.HasRepoPermissionAny(*perms)(
241 245 user=user, repo_name=pull_request.target_repo.repo_name)
242 246 else:
243 247 return h.HasRepoPermissionAnyApi(*perms)(
244 248 user=user, repo_name=pull_request.target_repo.repo_name)
245 249
246 250 def check_user_read(self, pull_request, user, api=False):
247 251 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 252 return self._check_perms(_perms, pull_request, user, api)
249 253
250 254 def check_user_merge(self, pull_request, user, api=False):
251 255 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 256 return self._check_perms(_perms, pull_request, user, api)
253 257
254 258 def check_user_update(self, pull_request, user, api=False):
255 259 owner = user.user_id == pull_request.user_id
256 260 return self.check_user_merge(pull_request, user, api) or owner
257 261
258 262 def check_user_delete(self, pull_request, user):
259 263 owner = user.user_id == pull_request.user_id
260 264 _perms = ('repository.admin',)
261 265 return self._check_perms(_perms, pull_request, user) or owner
262 266
263 267 def check_user_change_status(self, pull_request, user, api=False):
264 268 reviewer = user.user_id in [x.user_id for x in
265 269 pull_request.reviewers]
266 270 return self.check_user_update(pull_request, user, api) or reviewer
267 271
268 272 def check_user_comment(self, pull_request, user):
269 273 owner = user.user_id == pull_request.user_id
270 274 return self.check_user_read(pull_request, user) or owner
271 275
272 276 def get(self, pull_request):
273 277 return self.__get_pull_request(pull_request)
274 278
275 279 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 280 statuses=None, opened_by=None, order_by=None,
277 281 order_dir='desc', only_created=False):
278 282 repo = None
279 283 if repo_name:
280 284 repo = self._get_repo(repo_name)
281 285
282 286 q = PullRequest.query()
283 287
284 288 if search_q:
285 289 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 290 q = q.join(User)
287 291 q = q.filter(or_(
288 292 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 293 User.username.ilike(like_expression),
290 294 PullRequest.title.ilike(like_expression),
291 295 PullRequest.description.ilike(like_expression),
292 296 ))
293 297
294 298 # source or target
295 299 if repo and source:
296 300 q = q.filter(PullRequest.source_repo == repo)
297 301 elif repo:
298 302 q = q.filter(PullRequest.target_repo == repo)
299 303
300 304 # closed, opened
301 305 if statuses:
302 306 q = q.filter(PullRequest.status.in_(statuses))
303 307
304 308 # opened by filter
305 309 if opened_by:
306 310 q = q.filter(PullRequest.user_id.in_(opened_by))
307 311
308 312 # only get those that are in "created" state
309 313 if only_created:
310 314 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311 315
312 316 if order_by:
313 317 order_map = {
314 318 'name_raw': PullRequest.pull_request_id,
315 319 'id': PullRequest.pull_request_id,
316 320 'title': PullRequest.title,
317 321 'updated_on_raw': PullRequest.updated_on,
318 322 'target_repo': PullRequest.target_repo_id
319 323 }
320 324 if order_dir == 'asc':
321 325 q = q.order_by(order_map[order_by].asc())
322 326 else:
323 327 q = q.order_by(order_map[order_by].desc())
324 328
325 329 return q
326 330
327 331 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 332 opened_by=None):
329 333 """
330 334 Count the number of pull requests for a specific repository.
331 335
332 336 :param repo_name: target or source repo
333 337 :param search_q: filter by text
334 338 :param source: boolean flag to specify if repo_name refers to source
335 339 :param statuses: list of pull request statuses
336 340 :param opened_by: author user of the pull request
337 341 :returns: int number of pull requests
338 342 """
339 343 q = self._prepare_get_all_query(
340 344 repo_name, search_q=search_q, source=source, statuses=statuses,
341 345 opened_by=opened_by)
342 346
343 347 return q.count()
344 348
345 349 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 350 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 351 """
348 352 Get all pull requests for a specific repository.
349 353
350 354 :param repo_name: target or source repo
351 355 :param search_q: filter by text
352 356 :param source: boolean flag to specify if repo_name refers to source
353 357 :param statuses: list of pull request statuses
354 358 :param opened_by: author user of the pull request
355 359 :param offset: pagination offset
356 360 :param length: length of returned list
357 361 :param order_by: order of the returned list
358 362 :param order_dir: 'asc' or 'desc' ordering direction
359 363 :returns: list of pull requests
360 364 """
361 365 q = self._prepare_get_all_query(
362 366 repo_name, search_q=search_q, source=source, statuses=statuses,
363 367 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364 368
365 369 if length:
366 370 pull_requests = q.limit(length).offset(offset).all()
367 371 else:
368 372 pull_requests = q.all()
369 373
370 374 return pull_requests
371 375
372 376 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 377 opened_by=None):
374 378 """
375 379 Count the number of pull requests for a specific repository that are
376 380 awaiting review.
377 381
378 382 :param repo_name: target or source repo
379 383 :param search_q: filter by text
380 384 :param source: boolean flag to specify if repo_name refers to source
381 385 :param statuses: list of pull request statuses
382 386 :param opened_by: author user of the pull request
383 387 :returns: int number of pull requests
384 388 """
385 389 pull_requests = self.get_awaiting_review(
386 390 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387 391
388 392 return len(pull_requests)
389 393
390 394 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 395 opened_by=None, offset=0, length=None,
392 396 order_by=None, order_dir='desc'):
393 397 """
394 398 Get all pull requests for a specific repository that are awaiting
395 399 review.
396 400
397 401 :param repo_name: target or source repo
398 402 :param search_q: filter by text
399 403 :param source: boolean flag to specify if repo_name refers to source
400 404 :param statuses: list of pull request statuses
401 405 :param opened_by: author user of the pull request
402 406 :param offset: pagination offset
403 407 :param length: length of returned list
404 408 :param order_by: order of the returned list
405 409 :param order_dir: 'asc' or 'desc' ordering direction
406 410 :returns: list of pull requests
407 411 """
408 412 pull_requests = self.get_all(
409 413 repo_name, search_q=search_q, source=source, statuses=statuses,
410 414 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411 415
412 416 _filtered_pull_requests = []
413 417 for pr in pull_requests:
414 418 status = pr.calculated_review_status()
415 419 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 420 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 421 _filtered_pull_requests.append(pr)
418 422 if length:
419 423 return _filtered_pull_requests[offset:offset+length]
420 424 else:
421 425 return _filtered_pull_requests
422 426
423 427 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 428 opened_by=None, user_id=None):
425 429 """
426 430 Count the number of pull requests for a specific repository that are
427 431 awaiting review from a specific user.
428 432
429 433 :param repo_name: target or source repo
430 434 :param search_q: filter by text
431 435 :param source: boolean flag to specify if repo_name refers to source
432 436 :param statuses: list of pull request statuses
433 437 :param opened_by: author user of the pull request
434 438 :param user_id: reviewer user of the pull request
435 439 :returns: int number of pull requests
436 440 """
437 441 pull_requests = self.get_awaiting_my_review(
438 442 repo_name, search_q=search_q, source=source, statuses=statuses,
439 443 opened_by=opened_by, user_id=user_id)
440 444
441 445 return len(pull_requests)
442 446
443 447 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 448 opened_by=None, user_id=None, offset=0,
445 449 length=None, order_by=None, order_dir='desc'):
446 450 """
447 451 Get all pull requests for a specific repository that are awaiting
448 452 review from a specific user.
449 453
450 454 :param repo_name: target or source repo
451 455 :param search_q: filter by text
452 456 :param source: boolean flag to specify if repo_name refers to source
453 457 :param statuses: list of pull request statuses
454 458 :param opened_by: author user of the pull request
455 459 :param user_id: reviewer user of the pull request
456 460 :param offset: pagination offset
457 461 :param length: length of returned list
458 462 :param order_by: order of the returned list
459 463 :param order_dir: 'asc' or 'desc' ordering direction
460 464 :returns: list of pull requests
461 465 """
462 466 pull_requests = self.get_all(
463 467 repo_name, search_q=search_q, source=source, statuses=statuses,
464 468 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465 469
466 470 _my = PullRequestModel().get_not_reviewed(user_id)
467 471 my_participation = []
468 472 for pr in pull_requests:
469 473 if pr in _my:
470 474 my_participation.append(pr)
471 475 _filtered_pull_requests = my_participation
472 476 if length:
473 477 return _filtered_pull_requests[offset:offset+length]
474 478 else:
475 479 return _filtered_pull_requests
476 480
477 481 def get_not_reviewed(self, user_id):
478 482 return [
479 483 x.pull_request for x in PullRequestReviewers.query().filter(
480 484 PullRequestReviewers.user_id == user_id).all()
481 485 ]
482 486
483 487 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
484 488 order_by=None, order_dir='desc'):
485 489 q = PullRequest.query()
486 490 if user_id:
487 491 reviewers_subquery = Session().query(
488 492 PullRequestReviewers.pull_request_id).filter(
489 493 PullRequestReviewers.user_id == user_id).subquery()
490 494 user_filter = or_(
491 495 PullRequest.user_id == user_id,
492 496 PullRequest.pull_request_id.in_(reviewers_subquery)
493 497 )
494 498 q = PullRequest.query().filter(user_filter)
495 499
496 500 # closed, opened
497 501 if statuses:
498 502 q = q.filter(PullRequest.status.in_(statuses))
499 503
500 504 if query:
501 505 like_expression = u'%{}%'.format(safe_unicode(query))
502 506 q = q.join(User)
503 507 q = q.filter(or_(
504 508 cast(PullRequest.pull_request_id, String).ilike(like_expression),
505 509 User.username.ilike(like_expression),
506 510 PullRequest.title.ilike(like_expression),
507 511 PullRequest.description.ilike(like_expression),
508 512 ))
509 513 if order_by:
510 514 order_map = {
511 515 'name_raw': PullRequest.pull_request_id,
512 516 'title': PullRequest.title,
513 517 'updated_on_raw': PullRequest.updated_on,
514 518 'target_repo': PullRequest.target_repo_id
515 519 }
516 520 if order_dir == 'asc':
517 521 q = q.order_by(order_map[order_by].asc())
518 522 else:
519 523 q = q.order_by(order_map[order_by].desc())
520 524
521 525 return q
522 526
523 527 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 528 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 529 return q.count()
526 530
527 531 def get_im_participating_in(
528 532 self, user_id=None, statuses=None, query='', offset=0,
529 533 length=None, order_by=None, order_dir='desc'):
530 534 """
531 535 Get all pull requests that I'm participating in, or I have opened
532 536 """
533 537
534 538 q = self._prepare_participating_query(
535 539 user_id, statuses=statuses, query=query, order_by=order_by,
536 540 order_dir=order_dir)
537 541
538 542 if length:
539 543 pull_requests = q.limit(length).offset(offset).all()
540 544 else:
541 545 pull_requests = q.all()
542 546
543 547 return pull_requests
544 548
545 549 def get_versions(self, pull_request):
546 550 """
547 551 returns versions of the pull request sorted by ID ascending
548 552 """
549 553 return PullRequestVersion.query()\
550 554 .filter(PullRequestVersion.pull_request == pull_request)\
551 555 .order_by(PullRequestVersion.pull_request_version_id.asc())\
552 556 .all()
553 557
554 558 def get_pr_version(self, pull_request_id, version=None):
555 559 at_version = None
556 560
557 561 if version and version == 'latest':
558 562 pull_request_ver = PullRequest.get(pull_request_id)
559 563 pull_request_obj = pull_request_ver
560 564 _org_pull_request_obj = pull_request_obj
561 565 at_version = 'latest'
562 566 elif version:
563 567 pull_request_ver = PullRequestVersion.get_or_404(version)
564 568 pull_request_obj = pull_request_ver
565 569 _org_pull_request_obj = pull_request_ver.pull_request
566 570 at_version = pull_request_ver.pull_request_version_id
567 571 else:
568 572 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 573 pull_request_id)
570 574
571 575 pull_request_display_obj = PullRequest.get_pr_display_object(
572 576 pull_request_obj, _org_pull_request_obj)
573 577
574 578 return _org_pull_request_obj, pull_request_obj, \
575 579 pull_request_display_obj, at_version
576 580
577 581 def create(self, created_by, source_repo, source_ref, target_repo,
578 582 target_ref, revisions, reviewers, observers, title, description=None,
579 583 common_ancestor_id=None,
580 584 description_renderer=None,
581 585 reviewer_data=None, translator=None, auth_user=None):
582 586 translator = translator or get_current_request().translate
583 587
584 588 created_by_user = self._get_user(created_by)
585 589 auth_user = auth_user or created_by_user.AuthUser()
586 590 source_repo = self._get_repo(source_repo)
587 591 target_repo = self._get_repo(target_repo)
588 592
589 593 pull_request = PullRequest()
590 594 pull_request.source_repo = source_repo
591 595 pull_request.source_ref = source_ref
592 596 pull_request.target_repo = target_repo
593 597 pull_request.target_ref = target_ref
594 598 pull_request.revisions = revisions
595 599 pull_request.title = title
596 600 pull_request.description = description
597 601 pull_request.description_renderer = description_renderer
598 602 pull_request.author = created_by_user
599 603 pull_request.reviewer_data = reviewer_data
600 604 pull_request.pull_request_state = pull_request.STATE_CREATING
601 605 pull_request.common_ancestor_id = common_ancestor_id
602 606
603 607 Session().add(pull_request)
604 608 Session().flush()
605 609
606 610 reviewer_ids = set()
607 611 # members / reviewers
608 612 for reviewer_object in reviewers:
609 613 user_id, reasons, mandatory, role, rules = reviewer_object
610 614 user = self._get_user(user_id)
611 615
612 616 # skip duplicates
613 617 if user.user_id in reviewer_ids:
614 618 continue
615 619
616 620 reviewer_ids.add(user.user_id)
617 621
618 622 reviewer = PullRequestReviewers()
619 623 reviewer.user = user
620 624 reviewer.pull_request = pull_request
621 625 reviewer.reasons = reasons
622 626 reviewer.mandatory = mandatory
623 627 reviewer.role = role
624 628
625 629 # NOTE(marcink): pick only first rule for now
626 630 rule_id = list(rules)[0] if rules else None
627 631 rule = RepoReviewRule.get(rule_id) if rule_id else None
628 632 if rule:
629 633 review_group = rule.user_group_vote_rule(user_id)
630 634 # we check if this particular reviewer is member of a voting group
631 635 if review_group:
632 636 # NOTE(marcink):
633 637 # it can be that the user is a member of more groups, but we pick the first one,
634 638 # same as the default reviewers algo
635 639 review_group = review_group[0]
636 640
637 641 rule_data = {
638 642 'rule_name':
639 643 rule.review_rule_name,
640 644 'rule_user_group_entry_id':
641 645 review_group.repo_review_rule_users_group_id,
642 646 'rule_user_group_name':
643 647 review_group.users_group.users_group_name,
644 648 'rule_user_group_members':
645 649 [x.user.username for x in review_group.users_group.members],
646 650 'rule_user_group_members_id':
647 651 [x.user.user_id for x in review_group.users_group.members],
648 652 }
649 653 # e.g {'vote_rule': -1, 'mandatory': True}
650 654 rule_data.update(review_group.rule_data())
651 655
652 656 reviewer.rule_data = rule_data
653 657
654 658 Session().add(reviewer)
655 659 Session().flush()
656 660
657 661 for observer_object in observers:
658 662 user_id, reasons, mandatory, role, rules = observer_object
659 663 user = self._get_user(user_id)
660 664
661 665 # skip duplicates from reviewers
662 666 if user.user_id in reviewer_ids:
663 667 continue
664 668
665 669 #reviewer_ids.add(user.user_id)
666 670
667 671 observer = PullRequestReviewers()
668 672 observer.user = user
669 673 observer.pull_request = pull_request
670 674 observer.reasons = reasons
671 675 observer.mandatory = mandatory
672 676 observer.role = role
673 677
674 678 # NOTE(marcink): pick only first rule for now
675 679 rule_id = list(rules)[0] if rules else None
676 680 rule = RepoReviewRule.get(rule_id) if rule_id else None
677 681 if rule:
678 682 # TODO(marcink): do we need this for observers ??
679 683 pass
680 684
681 685 Session().add(observer)
682 686 Session().flush()
683 687
684 688 # Set approval status to "Under Review" for all commits which are
685 689 # part of this pull request.
686 690 ChangesetStatusModel().set_status(
687 691 repo=target_repo,
688 692 status=ChangesetStatus.STATUS_UNDER_REVIEW,
689 693 user=created_by_user,
690 694 pull_request=pull_request
691 695 )
692 696 # we commit early at this point. This has to do with the fact
693 697 # that the queries before do some row-locking, and because of that
694 698 # we need to commit and finish the transaction before the validate call below,
695 699 # which for large repos could take long, resulting in long row locks
696 700 Session().commit()
697 701
698 702 # prepare workspace, and run initial merge simulation. Set state during that
699 703 # operation
700 704 pull_request = PullRequest.get(pull_request.pull_request_id)
701 705
702 706 # set as merging for the merge simulation, and if it finishes, set to created
703 707 # so we mark that the simulation is working fine
704 708 with pull_request.set_state(PullRequest.STATE_MERGING,
705 709 final_state=PullRequest.STATE_CREATED) as state_obj:
706 710 MergeCheck.validate(
707 711 pull_request, auth_user=auth_user, translator=translator)
708 712
709 713 self.notify_reviewers(pull_request, reviewer_ids)
710 714 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
711 715
712 716 creation_data = pull_request.get_api_data(with_merge_state=False)
713 717 self._log_audit_action(
714 718 'repo.pull_request.create', {'data': creation_data},
715 719 auth_user, pull_request)
716 720
717 721 return pull_request
718 722
719 723 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
720 724 pull_request = self.__get_pull_request(pull_request)
721 725 target_scm = pull_request.target_repo.scm_instance()
722 726 if action == 'create':
723 727 trigger_hook = hooks_utils.trigger_create_pull_request_hook
724 728 elif action == 'merge':
725 729 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
726 730 elif action == 'close':
727 731 trigger_hook = hooks_utils.trigger_close_pull_request_hook
728 732 elif action == 'review_status_change':
729 733 trigger_hook = hooks_utils.trigger_review_pull_request_hook
730 734 elif action == 'update':
731 735 trigger_hook = hooks_utils.trigger_update_pull_request_hook
732 736 elif action == 'comment':
733 737 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
734 738 elif action == 'comment_edit':
735 739 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
736 740 else:
737 741 return
738 742
739 743 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
740 744 pull_request, action, trigger_hook)
741 745 trigger_hook(
742 746 username=user.username,
743 747 repo_name=pull_request.target_repo.repo_name,
744 748 repo_type=target_scm.alias,
745 749 pull_request=pull_request,
746 750 data=data)
747 751
748 752 def _get_commit_ids(self, pull_request):
749 753 """
750 754 Return the commit ids of the merged pull request.
751 755
752 756 This method does not yet deal correctly with the lack of autoupdates
753 757 nor with implicit target updates.
754 758 For example: if a commit in the source repo is already in the target, it
755 759 will be reported anyway.
756 760 """
757 761 merge_rev = pull_request.merge_rev
758 762 if merge_rev is None:
759 763 raise ValueError('This pull request was not merged yet')
760 764
761 765 commit_ids = list(pull_request.revisions)
762 766 if merge_rev not in commit_ids:
763 767 commit_ids.append(merge_rev)
764 768
765 769 return commit_ids
766 770
767 771 def merge_repo(self, pull_request, user, extras):
768 772 log.debug("Merging pull request %s", pull_request.pull_request_id)
769 773 extras['user_agent'] = 'internal-merge'
770 774 merge_state = self._merge_pull_request(pull_request, user, extras)
771 775 if merge_state.executed:
772 776 log.debug("Merge was successful, updating the pull request comments.")
773 777 self._comment_and_close_pr(pull_request, user, merge_state)
774 778
775 779 self._log_audit_action(
776 780 'repo.pull_request.merge',
777 781 {'merge_state': merge_state.__dict__},
778 782 user, pull_request)
779 783
780 784 else:
781 785 log.warn("Merge failed, not updating the pull request.")
782 786 return merge_state
783 787
784 788 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
785 789 target_vcs = pull_request.target_repo.scm_instance()
786 790 source_vcs = pull_request.source_repo.scm_instance()
787 791
788 792 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
789 793 pr_id=pull_request.pull_request_id,
790 794 pr_title=pull_request.title,
791 795 source_repo=source_vcs.name,
792 796 source_ref_name=pull_request.source_ref_parts.name,
793 797 target_repo=target_vcs.name,
794 798 target_ref_name=pull_request.target_ref_parts.name,
795 799 )
796 800
797 801 workspace_id = self._workspace_id(pull_request)
798 802 repo_id = pull_request.target_repo.repo_id
799 803 use_rebase = self._use_rebase_for_merging(pull_request)
800 804 close_branch = self._close_branch_before_merging(pull_request)
801 805 user_name = self._user_name_for_merging(pull_request, user)
802 806
803 807 target_ref = self._refresh_reference(
804 808 pull_request.target_ref_parts, target_vcs)
805 809
806 810 callback_daemon, extras = prepare_callback_daemon(
807 811 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
808 812 host=vcs_settings.HOOKS_HOST,
809 813 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
810 814
811 815 with callback_daemon:
812 816 # TODO: johbo: Implement a clean way to run a config_override
813 817 # for a single call.
814 818 target_vcs.config.set(
815 819 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
816 820
817 821 merge_state = target_vcs.merge(
818 822 repo_id, workspace_id, target_ref, source_vcs,
819 823 pull_request.source_ref_parts,
820 824 user_name=user_name, user_email=user.email,
821 825 message=message, use_rebase=use_rebase,
822 826 close_branch=close_branch)
823 827 return merge_state
824 828
825 829 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
826 830 pull_request.merge_rev = merge_state.merge_ref.commit_id
827 831 pull_request.updated_on = datetime.datetime.now()
828 832 close_msg = close_msg or 'Pull request merged and closed'
829 833
830 834 CommentsModel().create(
831 835 text=safe_unicode(close_msg),
832 836 repo=pull_request.target_repo.repo_id,
833 837 user=user.user_id,
834 838 pull_request=pull_request.pull_request_id,
835 839 f_path=None,
836 840 line_no=None,
837 841 closing_pr=True
838 842 )
839 843
840 844 Session().add(pull_request)
841 845 Session().flush()
842 846 # TODO: paris: replace invalidation with less radical solution
843 847 ScmModel().mark_for_invalidation(
844 848 pull_request.target_repo.repo_name)
845 849 self.trigger_pull_request_hook(pull_request, user, 'merge')
846 850
847 851 def has_valid_update_type(self, pull_request):
848 852 source_ref_type = pull_request.source_ref_parts.type
849 853 return source_ref_type in self.REF_TYPES
850 854
851 855 def get_flow_commits(self, pull_request):
852 856
853 857 # source repo
854 858 source_ref_name = pull_request.source_ref_parts.name
855 859 source_ref_type = pull_request.source_ref_parts.type
856 860 source_ref_id = pull_request.source_ref_parts.commit_id
857 861 source_repo = pull_request.source_repo.scm_instance()
858 862
859 863 try:
860 864 if source_ref_type in self.REF_TYPES:
861 865 source_commit = source_repo.get_commit(source_ref_name)
862 866 else:
863 867 source_commit = source_repo.get_commit(source_ref_id)
864 868 except CommitDoesNotExistError:
865 869 raise SourceRefMissing()
866 870
867 871 # target repo
868 872 target_ref_name = pull_request.target_ref_parts.name
869 873 target_ref_type = pull_request.target_ref_parts.type
870 874 target_ref_id = pull_request.target_ref_parts.commit_id
871 875 target_repo = pull_request.target_repo.scm_instance()
872 876
873 877 try:
874 878 if target_ref_type in self.REF_TYPES:
875 879 target_commit = target_repo.get_commit(target_ref_name)
876 880 else:
877 881 target_commit = target_repo.get_commit(target_ref_id)
878 882 except CommitDoesNotExistError:
879 883 raise TargetRefMissing()
880 884
881 885 return source_commit, target_commit
882 886
883 887 def update_commits(self, pull_request, updating_user):
884 888 """
885 889 Get the updated list of commits for the pull request
886 890 and return the new pull request version and the list
887 891 of commits processed by this update action
888 892
889 893 updating_user is the user_object who triggered the update
890 894 """
891 895 pull_request = self.__get_pull_request(pull_request)
892 896 source_ref_type = pull_request.source_ref_parts.type
893 897 source_ref_name = pull_request.source_ref_parts.name
894 898 source_ref_id = pull_request.source_ref_parts.commit_id
895 899
896 900 target_ref_type = pull_request.target_ref_parts.type
897 901 target_ref_name = pull_request.target_ref_parts.name
898 902 target_ref_id = pull_request.target_ref_parts.commit_id
899 903
900 904 if not self.has_valid_update_type(pull_request):
901 905 log.debug("Skipping update of pull request %s due to ref type: %s",
902 906 pull_request, source_ref_type)
903 907 return UpdateResponse(
904 908 executed=False,
905 909 reason=UpdateFailureReason.WRONG_REF_TYPE,
906 910 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
907 911 source_changed=False, target_changed=False)
908 912
909 913 try:
910 914 source_commit, target_commit = self.get_flow_commits(pull_request)
911 915 except SourceRefMissing:
912 916 return UpdateResponse(
913 917 executed=False,
914 918 reason=UpdateFailureReason.MISSING_SOURCE_REF,
915 919 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
916 920 source_changed=False, target_changed=False)
917 921 except TargetRefMissing:
918 922 return UpdateResponse(
919 923 executed=False,
920 924 reason=UpdateFailureReason.MISSING_TARGET_REF,
921 925 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
922 926 source_changed=False, target_changed=False)
923 927
924 928 source_changed = source_ref_id != source_commit.raw_id
925 929 target_changed = target_ref_id != target_commit.raw_id
926 930
927 931 if not (source_changed or target_changed):
928 932 log.debug("Nothing changed in pull request %s", pull_request)
929 933 return UpdateResponse(
930 934 executed=False,
931 935 reason=UpdateFailureReason.NO_CHANGE,
932 936 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
933 937 source_changed=target_changed, target_changed=source_changed)
934 938
935 939 change_in_found = 'target repo' if target_changed else 'source repo'
936 940 log.debug('Updating pull request because of change in %s detected',
937 941 change_in_found)
938 942
939 943 # Finally, there is a need for an update; in case of a source change
940 944 # we create a new version, else just an update
941 945 if source_changed:
942 946 pull_request_version = self._create_version_from_snapshot(pull_request)
943 947 self._link_comments_to_version(pull_request_version)
944 948 else:
945 949 try:
946 950 ver = pull_request.versions[-1]
947 951 except IndexError:
948 952 ver = None
949 953
950 954 pull_request.pull_request_version_id = \
951 955 ver.pull_request_version_id if ver else None
952 956 pull_request_version = pull_request
953 957
954 958 source_repo = pull_request.source_repo.scm_instance()
955 959 target_repo = pull_request.target_repo.scm_instance()
956 960
957 961 # re-compute commit ids
958 962 old_commit_ids = pull_request.revisions
959 963 pre_load = ["author", "date", "message", "branch"]
960 964 commit_ranges = target_repo.compare(
961 965 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
962 966 pre_load=pre_load)
963 967
964 968 target_ref = target_commit.raw_id
965 969 source_ref = source_commit.raw_id
966 970 ancestor_commit_id = target_repo.get_common_ancestor(
967 971 target_ref, source_ref, source_repo)
968 972
969 973 if not ancestor_commit_id:
970 974 raise ValueError(
971 975 'cannot calculate diff info without a common ancestor. '
972 976 'Make sure both repositories are related, and have a common forking commit.')
973 977
974 978 pull_request.common_ancestor_id = ancestor_commit_id
975 979
976 980 pull_request.source_ref = '%s:%s:%s' % (
977 981 source_ref_type, source_ref_name, source_commit.raw_id)
978 982 pull_request.target_ref = '%s:%s:%s' % (
979 983 target_ref_type, target_ref_name, ancestor_commit_id)
980 984
981 985 pull_request.revisions = [
982 986 commit.raw_id for commit in reversed(commit_ranges)]
983 987 pull_request.updated_on = datetime.datetime.now()
984 988 Session().add(pull_request)
985 989 new_commit_ids = pull_request.revisions
986 990
987 991 old_diff_data, new_diff_data = self._generate_update_diffs(
988 992 pull_request, pull_request_version)
989 993
990 994 # calculate commit and file changes
991 995 commit_changes = self._calculate_commit_id_changes(
992 996 old_commit_ids, new_commit_ids)
993 997 file_changes = self._calculate_file_changes(
994 998 old_diff_data, new_diff_data)
995 999
996 1000 # set comments as outdated if DIFFS changed
997 1001 CommentsModel().outdate_comments(
998 1002 pull_request, old_diff_data=old_diff_data,
999 1003 new_diff_data=new_diff_data)
1000 1004
1001 1005 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1002 1006 file_node_changes = (
1003 1007 file_changes.added or file_changes.modified or file_changes.removed)
1004 1008 pr_has_changes = valid_commit_changes or file_node_changes
1005 1009
1006 1010 # Add an automatic comment to the pull request, in case
1007 1011 # anything has changed
1008 1012 if pr_has_changes:
1009 1013 update_comment = CommentsModel().create(
1010 1014 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1011 1015 repo=pull_request.target_repo,
1012 1016 user=pull_request.author,
1013 1017 pull_request=pull_request,
1014 1018 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1015 1019
1016 1020 # Update status to "Under Review" for added commits
1017 1021 for commit_id in commit_changes.added:
1018 1022 ChangesetStatusModel().set_status(
1019 1023 repo=pull_request.source_repo,
1020 1024 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1021 1025 comment=update_comment,
1022 1026 user=pull_request.author,
1023 1027 pull_request=pull_request,
1024 1028 revision=commit_id)
1025 1029
1026 1030 # send update email to users
1027 1031 try:
1028 1032 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1029 1033 ancestor_commit_id=ancestor_commit_id,
1030 1034 commit_changes=commit_changes,
1031 1035 file_changes=file_changes)
1032 1036 except Exception:
1033 1037 log.exception('Failed to send email notification to users')
1034 1038
1035 1039 log.debug(
1036 1040 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1037 1041 'removed_ids: %s', pull_request.pull_request_id,
1038 1042 commit_changes.added, commit_changes.common, commit_changes.removed)
1039 1043 log.debug(
1040 1044 'Updated pull request with the following file changes: %s',
1041 1045 file_changes)
1042 1046
1043 1047 log.info(
1044 1048 "Updated pull request %s from commit %s to commit %s, "
1045 1049 "stored new version %s of this pull request.",
1046 1050 pull_request.pull_request_id, source_ref_id,
1047 1051 pull_request.source_ref_parts.commit_id,
1048 1052 pull_request_version.pull_request_version_id)
1049 1053 Session().commit()
1050 1054 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1051 1055
1052 1056 return UpdateResponse(
1053 1057 executed=True, reason=UpdateFailureReason.NONE,
1054 1058 old=pull_request, new=pull_request_version,
1055 1059 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1056 1060 source_changed=source_changed, target_changed=target_changed)
1057 1061
1058 1062 def _create_version_from_snapshot(self, pull_request):
1059 1063 version = PullRequestVersion()
1060 1064 version.title = pull_request.title
1061 1065 version.description = pull_request.description
1062 1066 version.status = pull_request.status
1063 1067 version.pull_request_state = pull_request.pull_request_state
1064 1068 version.created_on = datetime.datetime.now()
1065 1069 version.updated_on = pull_request.updated_on
1066 1070 version.user_id = pull_request.user_id
1067 1071 version.source_repo = pull_request.source_repo
1068 1072 version.source_ref = pull_request.source_ref
1069 1073 version.target_repo = pull_request.target_repo
1070 1074 version.target_ref = pull_request.target_ref
1071 1075
1072 1076 version._last_merge_source_rev = pull_request._last_merge_source_rev
1073 1077 version._last_merge_target_rev = pull_request._last_merge_target_rev
1074 1078 version.last_merge_status = pull_request.last_merge_status
1075 1079 version.last_merge_metadata = pull_request.last_merge_metadata
1076 1080 version.shadow_merge_ref = pull_request.shadow_merge_ref
1077 1081 version.merge_rev = pull_request.merge_rev
1078 1082 version.reviewer_data = pull_request.reviewer_data
1079 1083
1080 1084 version.revisions = pull_request.revisions
1081 1085 version.common_ancestor_id = pull_request.common_ancestor_id
1082 1086 version.pull_request = pull_request
1083 1087 Session().add(version)
1084 1088 Session().flush()
1085 1089
1086 1090 return version
1087 1091
1088 1092 def _generate_update_diffs(self, pull_request, pull_request_version):
1089 1093
1090 1094 diff_context = (
1091 1095 self.DIFF_CONTEXT +
1092 1096 CommentsModel.needed_extra_diff_context())
1093 1097 hide_whitespace_changes = False
1094 1098 source_repo = pull_request_version.source_repo
1095 1099 source_ref_id = pull_request_version.source_ref_parts.commit_id
1096 1100 target_ref_id = pull_request_version.target_ref_parts.commit_id
1097 1101 old_diff = self._get_diff_from_pr_or_version(
1098 1102 source_repo, source_ref_id, target_ref_id,
1099 1103 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1100 1104
1101 1105 source_repo = pull_request.source_repo
1102 1106 source_ref_id = pull_request.source_ref_parts.commit_id
1103 1107 target_ref_id = pull_request.target_ref_parts.commit_id
1104 1108
1105 1109 new_diff = self._get_diff_from_pr_or_version(
1106 1110 source_repo, source_ref_id, target_ref_id,
1107 1111 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1108 1112
1109 1113 old_diff_data = diffs.DiffProcessor(old_diff)
1110 1114 old_diff_data.prepare()
1111 1115 new_diff_data = diffs.DiffProcessor(new_diff)
1112 1116 new_diff_data.prepare()
1113 1117
1114 1118 return old_diff_data, new_diff_data
1115 1119
1116 1120 def _link_comments_to_version(self, pull_request_version):
1117 1121 """
1118 1122 Link all unlinked comments of this pull request to the given version.
1119 1123
1120 1124 :param pull_request_version: The `PullRequestVersion` to which
1121 1125 the comments shall be linked.
1122 1126
1123 1127 """
1124 1128 pull_request = pull_request_version.pull_request
1125 1129 comments = ChangesetComment.query()\
1126 1130 .filter(
1127 1131 # TODO: johbo: Should we query for the repo at all here?
1128 1132 # Pending decision on how comments of PRs are to be related
1129 1133 # to either the source repo, the target repo or no repo at all.
1130 1134 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1131 1135 ChangesetComment.pull_request == pull_request,
1132 1136 ChangesetComment.pull_request_version == None)\
1133 1137 .order_by(ChangesetComment.comment_id.asc())
1134 1138
1135 1139 # TODO: johbo: Find out why this breaks if it is done in a bulk
1136 1140 # operation.
1137 1141 for comment in comments:
1138 1142 comment.pull_request_version_id = (
1139 1143 pull_request_version.pull_request_version_id)
1140 1144 Session().add(comment)
1141 1145
1142 1146 def _calculate_commit_id_changes(self, old_ids, new_ids):
1143 1147 added = [x for x in new_ids if x not in old_ids]
1144 1148 common = [x for x in new_ids if x in old_ids]
1145 1149 removed = [x for x in old_ids if x not in new_ids]
1146 1150 total = new_ids
1147 1151 return ChangeTuple(added, common, removed, total)
1148 1152
1149 1153 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1150 1154
1151 1155 old_files = OrderedDict()
1152 1156 for diff_data in old_diff_data.parsed_diff:
1153 1157 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1154 1158
1155 1159 added_files = []
1156 1160 modified_files = []
1157 1161 removed_files = []
1158 1162 for diff_data in new_diff_data.parsed_diff:
1159 1163 new_filename = diff_data['filename']
1160 1164 new_hash = md5_safe(diff_data['raw_diff'])
1161 1165
1162 1166 old_hash = old_files.get(new_filename)
1163 1167 if not old_hash:
1164 1168 # file is not present in the old diff, we have to figure out the
1165 1169 # ADD/REMOVE operation from the parsed diff
1166 1170 operations_dict = diff_data['stats']['ops']
1167 1171 if diffs.DEL_FILENODE in operations_dict:
1168 1172 removed_files.append(new_filename)
1169 1173 else:
1170 1174 added_files.append(new_filename)
1171 1175 else:
1172 1176 if new_hash != old_hash:
1173 1177 modified_files.append(new_filename)
1174 1178 # now remove the file from old, since we have seen it already
1175 1179 del old_files[new_filename]
1176 1180
1177 1181 # removed files are those present in old, but not in NEW;
1178 1182 # since we remove old files that are present in the new diff, any left-overs
1179 1183 # should be the removed files
1180 1184 removed_files.extend(old_files.keys())
1181 1185
1182 1186 return FileChangeTuple(added_files, modified_files, removed_files)
1183 1187
1184 1188 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1185 1189 """
1186 1190 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1187 1191 so it always looks the same regardless of which default
1188 1192 renderer the system is using.
1189 1193
1190 1194 :param ancestor_commit_id: ancestor raw_id
1191 1195 :param changes: changes named tuple
1192 1196 :param file_changes: file changes named tuple
1193 1197
1194 1198 """
1195 1199 new_status = ChangesetStatus.get_status_lbl(
1196 1200 ChangesetStatus.STATUS_UNDER_REVIEW)
1197 1201
1198 1202 changed_files = (
1199 1203 file_changes.added + file_changes.modified + file_changes.removed)
1200 1204
1201 1205 params = {
1202 1206 'under_review_label': new_status,
1203 1207 'added_commits': changes.added,
1204 1208 'removed_commits': changes.removed,
1205 1209 'changed_files': changed_files,
1206 1210 'added_files': file_changes.added,
1207 1211 'modified_files': file_changes.modified,
1208 1212 'removed_files': file_changes.removed,
1209 1213 'ancestor_commit_id': ancestor_commit_id
1210 1214 }
1211 1215 renderer = RstTemplateRenderer()
1212 1216 return renderer.render('pull_request_update.mako', **params)
1213 1217
1214 1218 def edit(self, pull_request, title, description, description_renderer, user):
1215 1219 pull_request = self.__get_pull_request(pull_request)
1216 1220 old_data = pull_request.get_api_data(with_merge_state=False)
1217 1221 if pull_request.is_closed():
1218 1222 raise ValueError('This pull request is closed')
1219 1223 if title:
1220 1224 pull_request.title = title
1221 1225 pull_request.description = description
1222 1226 pull_request.updated_on = datetime.datetime.now()
1223 1227 pull_request.description_renderer = description_renderer
1224 1228 Session().add(pull_request)
1225 1229 self._log_audit_action(
1226 1230 'repo.pull_request.edit', {'old_data': old_data},
1227 1231 user, pull_request)
1228 1232
1229 1233 def update_reviewers(self, pull_request, reviewer_data, user):
1230 1234 """
1231 1235 Update the reviewers in the pull request
1232 1236
1233 1237 :param pull_request: the pr to update
1234 1238 :param reviewer_data: list of tuples
1235 1239 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1236 1240 :param user: current user who triggers this action
1237 1241 """
1238 1242
1239 1243 pull_request = self.__get_pull_request(pull_request)
1240 1244 if pull_request.is_closed():
1241 1245 raise ValueError('This pull request is closed')
1242 1246
1243 1247 reviewers = {}
1244 1248 for user_id, reasons, mandatory, role, rules in reviewer_data:
1245 1249 if isinstance(user_id, (int, compat.string_types)):
1246 1250 user_id = self._get_user(user_id).user_id
1247 1251 reviewers[user_id] = {
1248 1252 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1249 1253
1250 1254 reviewers_ids = set(reviewers.keys())
1251 1255 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1252 1256 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1253 1257
1254 1258 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1255 1259
1256 1260 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1257 1261 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1258 1262
1259 1263 log.debug("Adding %s reviewers", ids_to_add)
1260 1264 log.debug("Removing %s reviewers", ids_to_remove)
1261 1265 changed = False
1262 1266 added_audit_reviewers = []
1263 1267 removed_audit_reviewers = []
1264 1268
1265 1269 for uid in ids_to_add:
1266 1270 changed = True
1267 1271 _usr = self._get_user(uid)
1268 1272 reviewer = PullRequestReviewers()
1269 1273 reviewer.user = _usr
1270 1274 reviewer.pull_request = pull_request
1271 1275 reviewer.reasons = reviewers[uid]['reasons']
1272 1276 # NOTE(marcink): mandatory shouldn't be changed now
1273 1277 # reviewer.mandatory = reviewers[uid]['reasons']
1274 1278 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1275 1279 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1276 1280 Session().add(reviewer)
1277 1281 added_audit_reviewers.append(reviewer.get_dict())
1278 1282
1279 1283 for uid in ids_to_remove:
1280 1284 changed = True
1281 1285 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1282 1286 # This is an edge case that handles a previous state of having the same reviewer twice;
1283 1287 # this CAN happen due to the lack of DB checks
1284 1288 reviewers = PullRequestReviewers.query()\
1285 1289 .filter(PullRequestReviewers.user_id == uid,
1286 1290 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1287 1291 PullRequestReviewers.pull_request == pull_request)\
1288 1292 .all()
1289 1293
1290 1294 for obj in reviewers:
1291 1295 removed_audit_reviewers.append(obj.get_dict())
1292 1296 Session().delete(obj)
1293 1297
1294 1298 if changed:
1295 1299 Session().expire_all()
1296 1300 pull_request.updated_on = datetime.datetime.now()
1297 1301 Session().add(pull_request)
1298 1302
1299 1303 # finally store audit logs
1300 1304 for user_data in added_audit_reviewers:
1301 1305 self._log_audit_action(
1302 1306 'repo.pull_request.reviewer.add', {'data': user_data},
1303 1307 user, pull_request)
1304 1308 for user_data in removed_audit_reviewers:
1305 1309 self._log_audit_action(
1306 1310 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1307 1311 user, pull_request)
1308 1312
1309 1313 self.notify_reviewers(pull_request, ids_to_add, user.get_instance())
1310 1314 return ids_to_add, ids_to_remove
1311 1315
1312 1316 def update_observers(self, pull_request, observer_data, user):
1313 1317 """
1314 1318 Update the observers in the pull request
1315 1319
1316 1320 :param pull_request: the pr to update
1317 1321 :param observer_data: list of tuples
1318 1322 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1319 1323 :param user: current user who triggers this action
1320 1324 """
1321 1325 pull_request = self.__get_pull_request(pull_request)
1322 1326 if pull_request.is_closed():
1323 1327 raise ValueError('This pull request is closed')
1324 1328
1325 1329 observers = {}
1326 1330 for user_id, reasons, mandatory, role, rules in observer_data:
1327 1331 if isinstance(user_id, (int, compat.string_types)):
1328 1332 user_id = self._get_user(user_id).user_id
1329 1333 observers[user_id] = {
1330 1334 'reasons': reasons, 'observers': mandatory, 'role': role}
1331 1335
1332 1336 observers_ids = set(observers.keys())
1333 1337 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1334 1338 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1335 1339
1336 1340 current_observers_ids = set([x.user.user_id for x in current_observers])
1337 1341
1338 1342 ids_to_add = observers_ids.difference(current_observers_ids)
1339 1343 ids_to_remove = current_observers_ids.difference(observers_ids)
1340 1344
1341 1345 log.debug("Adding %s observer", ids_to_add)
1342 1346 log.debug("Removing %s observer", ids_to_remove)
1343 1347 changed = False
1344 1348 added_audit_observers = []
1345 1349 removed_audit_observers = []
1346 1350
1347 1351 for uid in ids_to_add:
1348 1352 changed = True
1349 1353 _usr = self._get_user(uid)
1350 1354 observer = PullRequestReviewers()
1351 1355 observer.user = _usr
1352 1356 observer.pull_request = pull_request
1353 1357 observer.reasons = observers[uid]['reasons']
1354 1358 # NOTE(marcink): mandatory shouldn't be changed now
1355 1359 # observer.mandatory = observer[uid]['reasons']
1356 1360
1357 1361 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1358 1362 observer.role = PullRequestReviewers.ROLE_OBSERVER
1359 1363 Session().add(observer)
1360 1364 added_audit_observers.append(observer.get_dict())
1361 1365
1362 1366 for uid in ids_to_remove:
1363 1367 changed = True
1364 1368 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1365 1369 # This is an edge case that handles a previous state of having the same reviewer twice;
1366 1370 # this CAN happen due to the lack of DB checks
1367 1371 observers = PullRequestReviewers.query()\
1368 1372 .filter(PullRequestReviewers.user_id == uid,
1369 1373 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1370 1374 PullRequestReviewers.pull_request == pull_request)\
1371 1375 .all()
1372 1376
1373 1377 for obj in observers:
1374 1378                 removed_audit_observers.append(obj.get_dict())
1375 1379 Session().delete(obj)
1376 1380
1377 1381 if changed:
1378 1382 Session().expire_all()
1379 1383 pull_request.updated_on = datetime.datetime.now()
1380 1384 Session().add(pull_request)
1381 1385
1382 1386 # finally store audit logs
1383 1387 for user_data in added_audit_observers:
1384 1388 self._log_audit_action(
1385 1389 'repo.pull_request.observer.add', {'data': user_data},
1386 1390 user, pull_request)
1387 1391 for user_data in removed_audit_observers:
1388 1392 self._log_audit_action(
1389 1393 'repo.pull_request.observer.delete', {'old_data': user_data},
1390 1394 user, pull_request)
1391 1395
1392 1396 self.notify_observers(pull_request, ids_to_add, user.get_instance())
1393 1397 return ids_to_add, ids_to_remove
1394 1398
1395 1399 def get_url(self, pull_request, request=None, permalink=False):
1396 1400 if not request:
1397 1401 request = get_current_request()
1398 1402
1399 1403 if permalink:
1400 1404 return request.route_url(
1401 1405 'pull_requests_global',
1402 1406 pull_request_id=pull_request.pull_request_id,)
1403 1407 else:
1404 1408 return request.route_url('pullrequest_show',
1405 1409 repo_name=safe_str(pull_request.target_repo.repo_name),
1406 1410 pull_request_id=pull_request.pull_request_id,)
1407 1411
1408 1412 def get_shadow_clone_url(self, pull_request, request=None):
1409 1413 """
1410 1414         Return a qualified URL pointing to the shadow repository. If this pull
1411 1415         request is closed, there is no shadow repository and ``None`` will be
1412 1416         returned.
1413 1417 """
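        # The returned value is the pull request URL with ``/repository``
        # appended, e.g. (hypothetical host and id):
        #   https://code.example.com/repo/pull-request/42/repository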
1414 1418 if pull_request.is_closed():
1415 1419 return None
1416 1420 else:
1417 1421 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1418 1422 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1419 1423
1420 1424 def _notify_reviewers(self, pull_request, user_ids, role, user):
1421 1425 # notification to reviewers/observers
1422 1426 if not user_ids:
1423 1427 return
1424 1428
1425 1429         log.debug('Notify the following %s users about the pull-request: %s', role, user_ids)
1426 1430
1427 1431 pull_request_obj = pull_request
1428 1432 # get the current participants of this pull request
1429 1433 recipients = user_ids
1430 1434 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1431 1435
1432 1436 pr_source_repo = pull_request_obj.source_repo
1433 1437 pr_target_repo = pull_request_obj.target_repo
1434 1438
1435 1439 pr_url = h.route_url('pullrequest_show',
1436 1440 repo_name=pr_target_repo.repo_name,
1437 1441 pull_request_id=pull_request_obj.pull_request_id,)
1438 1442
1439 1443 # set some variables for email notification
1440 1444 pr_target_repo_url = h.route_url(
1441 1445 'repo_summary', repo_name=pr_target_repo.repo_name)
1442 1446
1443 1447 pr_source_repo_url = h.route_url(
1444 1448 'repo_summary', repo_name=pr_source_repo.repo_name)
1445 1449
1446 1450 # pull request specifics
1447 1451 pull_request_commits = [
1448 1452 (x.raw_id, x.message)
1449 1453 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1450 1454
1451 1455 current_rhodecode_user = user
1452 1456 kwargs = {
1453 1457 'user': current_rhodecode_user,
1454 1458 'pull_request_author': pull_request.author,
1455 1459 'pull_request': pull_request_obj,
1456 1460 'pull_request_commits': pull_request_commits,
1457 1461
1458 1462 'pull_request_target_repo': pr_target_repo,
1459 1463 'pull_request_target_repo_url': pr_target_repo_url,
1460 1464
1461 1465 'pull_request_source_repo': pr_source_repo,
1462 1466 'pull_request_source_repo_url': pr_source_repo_url,
1463 1467
1464 1468 'pull_request_url': pr_url,
1465 1469 'thread_ids': [pr_url],
1466 1470 'user_role': role
1467 1471 }
1468 1472
1469 1473 # pre-generate the subject for notification itself
1470 1474 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1471 1475 notification_type, **kwargs)
1472 1476
1473 1477 # create notification objects, and emails
1474 1478 NotificationModel().create(
1475 1479 created_by=current_rhodecode_user,
1476 1480 notification_subject=subject,
1477 1481 notification_body=body_plaintext,
1478 1482 notification_type=notification_type,
1479 1483 recipients=recipients,
1480 1484 email_kwargs=kwargs,
1481 1485 )
1482 1486
1483 1487 def notify_reviewers(self, pull_request, reviewers_ids, user):
1484 1488 return self._notify_reviewers(pull_request, reviewers_ids,
1485 1489 PullRequestReviewers.ROLE_REVIEWER, user)
1486 1490
1487 1491 def notify_observers(self, pull_request, observers_ids, user):
1488 1492 return self._notify_reviewers(pull_request, observers_ids,
1489 1493 PullRequestReviewers.ROLE_OBSERVER, user)
1490 1494
1491 1495 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1492 1496 commit_changes, file_changes):
1493 1497
1494 1498 updating_user_id = updating_user.user_id
1495 1499 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1496 1500         # NOTE(marcink): send notification to all other users except the
1497 1501         # person who updated the PR
1498 1502 recipients = reviewers.difference(set([updating_user_id]))
1499 1503
1500 1504         log.debug('Notify the following recipients about the pull-request update: %s', recipients)
1501 1505
1502 1506 pull_request_obj = pull_request
1503 1507
1504 1508 # send email about the update
1505 1509 changed_files = (
1506 1510 file_changes.added + file_changes.modified + file_changes.removed)
1507 1511
1508 1512 pr_source_repo = pull_request_obj.source_repo
1509 1513 pr_target_repo = pull_request_obj.target_repo
1510 1514
1511 1515 pr_url = h.route_url('pullrequest_show',
1512 1516 repo_name=pr_target_repo.repo_name,
1513 1517 pull_request_id=pull_request_obj.pull_request_id,)
1514 1518
1515 1519 # set some variables for email notification
1516 1520 pr_target_repo_url = h.route_url(
1517 1521 'repo_summary', repo_name=pr_target_repo.repo_name)
1518 1522
1519 1523 pr_source_repo_url = h.route_url(
1520 1524 'repo_summary', repo_name=pr_source_repo.repo_name)
1521 1525
1522 1526 email_kwargs = {
1523 1527 'date': datetime.datetime.now(),
1524 1528 'updating_user': updating_user,
1525 1529
1526 1530 'pull_request': pull_request_obj,
1527 1531
1528 1532 'pull_request_target_repo': pr_target_repo,
1529 1533 'pull_request_target_repo_url': pr_target_repo_url,
1530 1534
1531 1535 'pull_request_source_repo': pr_source_repo,
1532 1536 'pull_request_source_repo_url': pr_source_repo_url,
1533 1537
1534 1538 'pull_request_url': pr_url,
1535 1539
1536 1540 'ancestor_commit_id': ancestor_commit_id,
1537 1541 'added_commits': commit_changes.added,
1538 1542 'removed_commits': commit_changes.removed,
1539 1543 'changed_files': changed_files,
1540 1544 'added_files': file_changes.added,
1541 1545 'modified_files': file_changes.modified,
1542 1546 'removed_files': file_changes.removed,
1543 1547 'thread_ids': [pr_url],
1544 1548 }
1545 1549
1546 1550 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1547 1551 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1548 1552
1549 1553 # create notification objects, and emails
1550 1554 NotificationModel().create(
1551 1555 created_by=updating_user,
1552 1556 notification_subject=subject,
1553 1557 notification_body=body_plaintext,
1554 1558 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1555 1559 recipients=recipients,
1556 1560 email_kwargs=email_kwargs,
1557 1561 )
1558 1562
1559 1563 def delete(self, pull_request, user=None):
1560 1564 if not user:
1561 1565 user = getattr(get_current_rhodecode_user(), 'username', None)
1562 1566
1563 1567 pull_request = self.__get_pull_request(pull_request)
1564 1568 old_data = pull_request.get_api_data(with_merge_state=False)
1565 1569 self._cleanup_merge_workspace(pull_request)
1566 1570 self._log_audit_action(
1567 1571 'repo.pull_request.delete', {'old_data': old_data},
1568 1572 user, pull_request)
1569 1573 Session().delete(pull_request)
1570 1574
1571 1575 def close_pull_request(self, pull_request, user):
1572 1576 pull_request = self.__get_pull_request(pull_request)
1573 1577 self._cleanup_merge_workspace(pull_request)
1574 1578 pull_request.status = PullRequest.STATUS_CLOSED
1575 1579 pull_request.updated_on = datetime.datetime.now()
1576 1580 Session().add(pull_request)
1577 1581 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1578 1582
1579 1583 pr_data = pull_request.get_api_data(with_merge_state=False)
1580 1584 self._log_audit_action(
1581 1585 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1582 1586
1583 1587 def close_pull_request_with_comment(
1584 1588 self, pull_request, user, repo, message=None, auth_user=None):
1585 1589
1586 1590 pull_request_review_status = pull_request.calculated_review_status()
1587 1591
1588 1592 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1589 1593 # approved only if we have voting consent
1590 1594 status = ChangesetStatus.STATUS_APPROVED
1591 1595 else:
1592 1596 status = ChangesetStatus.STATUS_REJECTED
1593 1597 status_lbl = ChangesetStatus.get_status_lbl(status)
1594 1598
1595 1599 default_message = (
1596 1600 'Closing with status change {transition_icon} {status}.'
1597 1601 ).format(transition_icon='>', status=status_lbl)
1598 1602 text = message or default_message
1599 1603
1600 1604 # create a comment, and link it to new status
1601 1605 comment = CommentsModel().create(
1602 1606 text=text,
1603 1607 repo=repo.repo_id,
1604 1608 user=user.user_id,
1605 1609 pull_request=pull_request.pull_request_id,
1606 1610 status_change=status_lbl,
1607 1611 status_change_type=status,
1608 1612 closing_pr=True,
1609 1613 auth_user=auth_user,
1610 1614 )
1611 1615
1612 1616 # calculate old status before we change it
1613 1617 old_calculated_status = pull_request.calculated_review_status()
1614 1618 ChangesetStatusModel().set_status(
1615 1619 repo.repo_id,
1616 1620 status,
1617 1621 user.user_id,
1618 1622 comment=comment,
1619 1623 pull_request=pull_request.pull_request_id
1620 1624 )
1621 1625
1622 1626 Session().flush()
1623 1627
1624 1628 self.trigger_pull_request_hook(pull_request, user, 'comment',
1625 1629 data={'comment': comment})
1626 1630
1627 1631         # we now calculate the status of the pull request again, and based on
1628 1632         # that calculation trigger a status change. This can happen when a
1629 1633         # non-reviewer admin closes a PR: their vote doesn't change the status,
1630 1634         # while a reviewer's vote might change it.
1631 1635 calculated_status = pull_request.calculated_review_status()
1632 1636 if old_calculated_status != calculated_status:
1633 1637 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1634 1638 data={'status': calculated_status})
1635 1639
1636 1640 # finally close the PR
1637 1641 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1638 1642
1639 1643 return comment, status
1640 1644
1641 1645 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1642 1646 _ = translator or get_current_request().translate
1643 1647
1644 1648 if not self._is_merge_enabled(pull_request):
1645 1649 return None, False, _('Server-side pull request merging is disabled.')
1646 1650
1647 1651 if pull_request.is_closed():
1648 1652 return None, False, _('This pull request is closed.')
1649 1653
1650 1654 merge_possible, msg = self._check_repo_requirements(
1651 1655 target=pull_request.target_repo, source=pull_request.source_repo,
1652 1656 translator=_)
1653 1657 if not merge_possible:
1654 1658 return None, merge_possible, msg
1655 1659
1656 1660 try:
1657 1661 merge_response = self._try_merge(
1658 1662 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1659 1663 log.debug("Merge response: %s", merge_response)
1660 1664 return merge_response, merge_response.possible, merge_response.merge_status_message
1661 1665 except NotImplementedError:
1662 1666 return None, False, _('Pull request merging is not supported.')
1663 1667
1664 1668 def _check_repo_requirements(self, target, source, translator):
1665 1669 """
1666 1670 Check if `target` and `source` have compatible requirements.
1667 1671
1668 1672 Currently this is just checking for largefiles.
1669 1673 """
1670 1674 _ = translator
1671 1675 target_has_largefiles = self._has_largefiles(target)
1672 1676 source_has_largefiles = self._has_largefiles(source)
1673 1677 merge_possible = True
1674 1678 message = u''
1675 1679
1676 1680 if target_has_largefiles != source_has_largefiles:
1677 1681 merge_possible = False
1678 1682 if source_has_largefiles:
1679 1683 message = _(
1680 1684 'Target repository large files support is disabled.')
1681 1685 else:
1682 1686 message = _(
1683 1687 'Source repository large files support is disabled.')
1684 1688
1685 1689 return merge_possible, message
1686 1690
1687 1691 def _has_largefiles(self, repo):
1688 1692 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1689 1693 'extensions', 'largefiles')
1690 1694 return largefiles_ui and largefiles_ui[0].active
1691 1695
1692 1696 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1693 1697 """
1694 1698 Try to merge the pull request and return the merge status.
1695 1699 """
1696 1700 log.debug(
1697 1701 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1698 1702 pull_request.pull_request_id, force_shadow_repo_refresh)
1699 1703 target_vcs = pull_request.target_repo.scm_instance()
1700 1704 # Refresh the target reference.
1701 1705 try:
1702 1706 target_ref = self._refresh_reference(
1703 1707 pull_request.target_ref_parts, target_vcs)
1704 1708 except CommitDoesNotExistError:
1705 1709 merge_state = MergeResponse(
1706 1710 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1707 1711 metadata={'target_ref': pull_request.target_ref_parts})
1708 1712 return merge_state
1709 1713
1710 1714 target_locked = pull_request.target_repo.locked
1711 1715 if target_locked and target_locked[0]:
1712 1716 locked_by = 'user:{}'.format(target_locked[0])
1713 1717 log.debug("The target repository is locked by %s.", locked_by)
1714 1718 merge_state = MergeResponse(
1715 1719 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1716 1720 metadata={'locked_by': locked_by})
1717 1721 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1718 1722 pull_request, target_ref):
1719 1723 log.debug("Refreshing the merge status of the repository.")
1720 1724 merge_state = self._refresh_merge_state(
1721 1725 pull_request, target_vcs, target_ref)
1722 1726 else:
1723 1727 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1724 1728 metadata = {
1725 1729 'unresolved_files': '',
1726 1730 'target_ref': pull_request.target_ref_parts,
1727 1731 'source_ref': pull_request.source_ref_parts,
1728 1732 }
1729 1733 if pull_request.last_merge_metadata:
1730 1734 metadata.update(pull_request.last_merge_metadata_parsed)
1731 1735
1732 1736 if not possible and target_ref.type == 'branch':
1733 1737 # NOTE(marcink): case for mercurial multiple heads on branch
1734 1738 heads = target_vcs._heads(target_ref.name)
1735 1739 if len(heads) != 1:
1736 1740                     heads = ',\n'.join(target_vcs._heads(target_ref.name))
1737 1741 metadata.update({
1738 1742 'heads': heads
1739 1743 })
1740 1744
1741 1745 merge_state = MergeResponse(
1742 1746 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1743 1747
1744 1748 return merge_state
1745 1749
1746 1750 def _refresh_reference(self, reference, vcs_repository):
1747 1751 if reference.type in self.UPDATABLE_REF_TYPES:
1748 1752 name_or_id = reference.name
1749 1753 else:
1750 1754 name_or_id = reference.commit_id
1751 1755
1752 1756 refreshed_commit = vcs_repository.get_commit(name_or_id)
1753 1757 refreshed_reference = Reference(
1754 1758 reference.type, reference.name, refreshed_commit.raw_id)
1755 1759 return refreshed_reference
1756 1760
1757 1761 def _needs_merge_state_refresh(self, pull_request, target_reference):
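        # The cached merge state is reused only if the first stored source
        # revision and the target reference commit still match what was
        # recorded at the last merge attempt; otherwise a refresh is needed.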
1758 1762 return not(
1759 1763 pull_request.revisions and
1760 1764 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1761 1765 target_reference.commit_id == pull_request._last_merge_target_rev)
1762 1766
1763 1767 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
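        # Performs a dry-run merge in the shadow repository and caches the
        # outcome (status, metadata and shadow merge ref) on the pull request.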
1764 1768 workspace_id = self._workspace_id(pull_request)
1765 1769 source_vcs = pull_request.source_repo.scm_instance()
1766 1770 repo_id = pull_request.target_repo.repo_id
1767 1771 use_rebase = self._use_rebase_for_merging(pull_request)
1768 1772 close_branch = self._close_branch_before_merging(pull_request)
1769 1773 merge_state = target_vcs.merge(
1770 1774 repo_id, workspace_id,
1771 1775 target_reference, source_vcs, pull_request.source_ref_parts,
1772 1776 dry_run=True, use_rebase=use_rebase,
1773 1777 close_branch=close_branch)
1774 1778
1775 1779 # Do not store the response if there was an unknown error.
1776 1780 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1777 1781 pull_request._last_merge_source_rev = \
1778 1782 pull_request.source_ref_parts.commit_id
1779 1783 pull_request._last_merge_target_rev = target_reference.commit_id
1780 1784 pull_request.last_merge_status = merge_state.failure_reason
1781 1785 pull_request.last_merge_metadata = merge_state.metadata
1782 1786
1783 1787 pull_request.shadow_merge_ref = merge_state.merge_ref
1784 1788 Session().add(pull_request)
1785 1789 Session().commit()
1786 1790
1787 1791 return merge_state
1788 1792
1789 1793 def _workspace_id(self, pull_request):
1790 1794 workspace_id = 'pr-%s' % pull_request.pull_request_id
1791 1795 return workspace_id
1792 1796
1793 1797 def generate_repo_data(self, repo, commit_id=None, branch=None,
1794 1798 bookmark=None, translator=None):
1795 1799 from rhodecode.model.repo import RepoModel
1796 1800
1797 1801 all_refs, selected_ref = \
1798 1802 self._get_repo_pullrequest_sources(
1799 1803 repo.scm_instance(), commit_id=commit_id,
1800 1804 branch=branch, bookmark=bookmark, translator=translator)
1801 1805
1802 1806 refs_select2 = []
1803 1807 for element in all_refs:
1804 1808 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1805 1809 refs_select2.append({'text': element[1], 'children': children})
1806 1810
1807 1811 return {
1808 1812 'user': {
1809 1813 'user_id': repo.user.user_id,
1810 1814 'username': repo.user.username,
1811 1815 'firstname': repo.user.first_name,
1812 1816 'lastname': repo.user.last_name,
1813 1817 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1814 1818 },
1815 1819 'name': repo.repo_name,
1816 1820 'link': RepoModel().get_url(repo),
1817 1821 'description': h.chop_at_smart(repo.description_safe, '\n'),
1818 1822 'refs': {
1819 1823 'all_refs': all_refs,
1820 1824 'selected_ref': selected_ref,
1821 1825 'select2_refs': refs_select2
1822 1826 }
1823 1827 }
1824 1828
1825 1829 def generate_pullrequest_title(self, source, source_ref, target):
1826 1830 return u'{source}#{at_ref} to {target}'.format(
1827 1831 source=source,
1828 1832 at_ref=source_ref,
1829 1833 target=target,
1830 1834 )
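    # For example (hypothetical arguments):
    #   generate_pullrequest_title('my-repo', 'feature-x', 'upstream-repo')
    #   -> u'my-repo#feature-x to upstream-repo'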
1831 1835
1832 1836 def _cleanup_merge_workspace(self, pull_request):
1833 1837 # Merging related cleanup
1834 1838 repo_id = pull_request.target_repo.repo_id
1835 1839 target_scm = pull_request.target_repo.scm_instance()
1836 1840 workspace_id = self._workspace_id(pull_request)
1837 1841
1838 1842 try:
1839 1843 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1840 1844 except NotImplementedError:
1841 1845 pass
1842 1846
1843 1847 def _get_repo_pullrequest_sources(
1844 1848 self, repo, commit_id=None, branch=None, bookmark=None,
1845 1849 translator=None):
1846 1850 """
1847 1851         Return a structure with the repo's interesting commits, suitable for
1848 1852         the selectors in the pullrequest controller.
1849 1853
1850 1854 :param commit_id: a commit that must be in the list somehow
1851 1855 and selected by default
1852 1856 :param branch: a branch that must be in the list and selected
1853 1857 by default - even if closed
1854 1858         :param bookmark: a bookmark that must be in the list and selected by default
1855 1859 """
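        # Illustrative return value (hypothetical refs, a sketch only):
        #   groups = [
        #       ([(u'branch:default:abc123', u'default')], u'Branches'),
        #   ]
        #   selected = u'branch:default:abc123'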
1856 1860 _ = translator or get_current_request().translate
1857 1861
1858 1862 commit_id = safe_str(commit_id) if commit_id else None
1859 1863 branch = safe_unicode(branch) if branch else None
1860 1864 bookmark = safe_unicode(bookmark) if bookmark else None
1861 1865
1862 1866 selected = None
1863 1867
1864 1868 # order matters: first source that has commit_id in it will be selected
1865 1869 sources = []
1866 1870 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1867 1871 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1868 1872
1869 1873 if commit_id:
1870 1874 ref_commit = (h.short_id(commit_id), commit_id)
1871 1875 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1872 1876
1873 1877 sources.append(
1874 1878 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1875 1879 )
1876 1880
1877 1881 groups = []
1878 1882
1879 1883 for group_key, ref_list, group_name, match in sources:
1880 1884 group_refs = []
1881 1885 for ref_name, ref_id in ref_list:
1882 1886 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1883 1887 group_refs.append((ref_key, ref_name))
1884 1888
1885 1889 if not selected:
1886 1890 if set([commit_id, match]) & set([ref_id, ref_name]):
1887 1891 selected = ref_key
1888 1892
1889 1893 if group_refs:
1890 1894 groups.append((group_refs, group_name))
1891 1895
1892 1896 if not selected:
1893 1897 ref = commit_id or branch or bookmark
1894 1898 if ref:
1895 1899 raise CommitDoesNotExistError(
1896 1900 u'No commit refs could be found matching: {}'.format(ref))
1897 1901 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1898 1902 selected = u'branch:{}:{}'.format(
1899 1903 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1900 1904 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1901 1905 )
1902 1906 elif repo.commit_ids:
1903 1907 # make the user select in this case
1904 1908 selected = None
1905 1909 else:
1906 1910 raise EmptyRepositoryError()
1907 1911 return groups, selected
1908 1912
1909 1913 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1910 1914 hide_whitespace_changes, diff_context):
1911 1915
1912 1916 return self._get_diff_from_pr_or_version(
1913 1917 source_repo, source_ref_id, target_ref_id,
1914 1918 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1915 1919
1916 1920 def _get_diff_from_pr_or_version(
1917 1921 self, source_repo, source_ref_id, target_ref_id,
1918 1922 hide_whitespace_changes, diff_context):
1919 1923
1920 1924 target_commit = source_repo.get_commit(
1921 1925 commit_id=safe_str(target_ref_id))
1922 1926 source_commit = source_repo.get_commit(
1923 1927 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1924 1928 if isinstance(source_repo, Repository):
1925 1929 vcs_repo = source_repo.scm_instance()
1926 1930 else:
1927 1931 vcs_repo = source_repo
1928 1932
1929 1933 # TODO: johbo: In the context of an update, we cannot reach
1930 1934 # the old commit anymore with our normal mechanisms. It needs
1931 1935 # some sort of special support in the vcs layer to avoid this
1932 1936 # workaround.
1933 1937 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1934 1938 vcs_repo.alias == 'git'):
1935 1939 source_commit.raw_id = safe_str(source_ref_id)
1936 1940
1937 1941 log.debug('calculating diff between '
1938 1942 'source_ref:%s and target_ref:%s for repo `%s`',
1939 1943                   source_ref_id, target_ref_id,
1940 1944 safe_unicode(vcs_repo.path))
1941 1945
1942 1946 vcs_diff = vcs_repo.get_diff(
1943 1947 commit1=target_commit, commit2=source_commit,
1944 1948 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1945 1949 return vcs_diff
1946 1950
1947 1951 def _is_merge_enabled(self, pull_request):
1948 1952 return self._get_general_setting(
1949 1953 pull_request, 'rhodecode_pr_merge_enabled')
1950 1954
1951 1955 def _use_rebase_for_merging(self, pull_request):
1952 1956 repo_type = pull_request.target_repo.repo_type
1953 1957 if repo_type == 'hg':
1954 1958 return self._get_general_setting(
1955 1959 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1956 1960 elif repo_type == 'git':
1957 1961 return self._get_general_setting(
1958 1962 pull_request, 'rhodecode_git_use_rebase_for_merging')
1959 1963
1960 1964 return False
1961 1965
1962 1966 def _user_name_for_merging(self, pull_request, user):
1963 1967 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1964 1968 if env_user_name_attr and hasattr(user, env_user_name_attr):
1965 1969 user_name_attr = env_user_name_attr
1966 1970 else:
1967 1971 user_name_attr = 'short_contact'
1968 1972
1969 1973 user_name = getattr(user, user_name_attr)
1970 1974 return user_name
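    # For example, setting RC_MERGE_USER_NAME_ATTR=username in the environment
    # would use ``user.username`` for the merge user name instead of the
    # default ``short_contact`` attribute (illustrative value).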
1971 1975
1972 1976 def _close_branch_before_merging(self, pull_request):
1973 1977 repo_type = pull_request.target_repo.repo_type
1974 1978 if repo_type == 'hg':
1975 1979 return self._get_general_setting(
1976 1980 pull_request, 'rhodecode_hg_close_branch_before_merging')
1977 1981 elif repo_type == 'git':
1978 1982 return self._get_general_setting(
1979 1983 pull_request, 'rhodecode_git_close_branch_before_merging')
1980 1984
1981 1985 return False
1982 1986
1983 1987 def _get_general_setting(self, pull_request, settings_key, default=False):
1984 1988 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1985 1989 settings = settings_model.get_general_settings()
1986 1990 return settings.get(settings_key, default)
1987 1991
1988 1992 def _log_audit_action(self, action, action_data, user, pull_request):
1989 1993 audit_logger.store(
1990 1994 action=action,
1991 1995 action_data=action_data,
1992 1996 user=user,
1993 1997 repo=pull_request.target_repo)
1994 1998
1995 1999 def get_reviewer_functions(self):
1996 2000 """
1997 2001         Fetch the functions used for validating and computing default reviewers.
1998 2002         If available we use the EE package, else we fall back to the CE
1999 2003         package functions
2000 2004 """
2001 2005 try:
2002 2006 from rc_reviewers.utils import get_default_reviewers_data
2003 2007 from rc_reviewers.utils import validate_default_reviewers
2004 2008 from rc_reviewers.utils import validate_observers
2005 2009 except ImportError:
2006 2010 from rhodecode.apps.repository.utils import get_default_reviewers_data
2007 2011 from rhodecode.apps.repository.utils import validate_default_reviewers
2008 2012 from rhodecode.apps.repository.utils import validate_observers
2009 2013
2010 2014 return get_default_reviewers_data, validate_default_reviewers, validate_observers
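    # Illustrative usage (a sketch; the three callables are whichever EE or CE
    # implementations were importable above):
    #   get_reviewers_data, validate_reviewers, validate_observers = \
    #       PullRequestModel().get_reviewer_functions()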
2011 2015
2012 2016
2013 2017 class MergeCheck(object):
2014 2018 """
2015 2019     Performs merge checks and returns a check object which stores information
2016 2020     about merge errors and merge conditions
2017 2021 """
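    # Illustrative usage (a sketch, assuming a pull request, auth user and
    # translator are available in the calling code):
    #   check = MergeCheck.validate(pull_request, auth_user, translator=_)
    #   if check.failed:
    #       for error_type, message in check.errors:
    #           log.debug('merge check %s: %s', error_type, message)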
2018 2022 TODO_CHECK = 'todo'
2019 2023 PERM_CHECK = 'perm'
2020 2024 REVIEW_CHECK = 'review'
2021 2025 MERGE_CHECK = 'merge'
2022 2026 WIP_CHECK = 'wip'
2023 2027
2024 2028 def __init__(self):
2025 2029 self.review_status = None
2026 2030 self.merge_possible = None
2027 2031 self.merge_msg = ''
2028 2032 self.merge_response = None
2029 2033 self.failed = None
2030 2034 self.errors = []
2031 2035 self.error_details = OrderedDict()
2032 2036 self.source_commit = AttributeDict()
2033 2037 self.target_commit = AttributeDict()
2034 2038
2035 2039 def __repr__(self):
2036 2040 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2037 2041 self.merge_possible, self.failed, self.errors)
2038 2042
2039 2043 def push_error(self, error_type, message, error_key, details):
2040 2044 self.failed = True
2041 2045 self.errors.append([error_type, message])
2042 2046 self.error_details[error_key] = dict(
2043 2047 details=details,
2044 2048 error_type=error_type,
2045 2049 message=message
2046 2050 )
2047 2051
2048 2052 @classmethod
2049 2053 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2050 2054 force_shadow_repo_refresh=False):
2051 2055 _ = translator
2052 2056 merge_check = cls()
2053 2057
2054 2058 # title has WIP:
2055 2059 if pull_request.work_in_progress:
2056 2060 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2057 2061
2058 2062             msg = _('WIP marker in title prevents an accidental merge.')
2059 2063 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2060 2064 if fail_early:
2061 2065 return merge_check
2062 2066
2063 2067 # permissions to merge
2064 2068 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2065 2069 if not user_allowed_to_merge:
2066 2070             log.debug("MergeCheck: cannot merge, user has no merge permission.")
2067 2071
2068 2072 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2069 2073 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2070 2074 if fail_early:
2071 2075 return merge_check
2072 2076
2073 2077 # permission to merge into the target branch
2074 2078 target_commit_id = pull_request.target_ref_parts.commit_id
2075 2079 if pull_request.target_ref_parts.type == 'branch':
2076 2080 branch_name = pull_request.target_ref_parts.name
2077 2081 else:
2078 2082             # for mercurial we can always figure out the branch from the commit
2079 2083             # in case of a bookmark
2080 2084 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2081 2085 branch_name = target_commit.branch
2082 2086
2083 2087 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2084 2088 pull_request.target_repo.repo_name, branch_name)
2085 2089 if branch_perm and branch_perm == 'branch.none':
2086 2090 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2087 2091 branch_name, rule)
2088 2092 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2089 2093 if fail_early:
2090 2094 return merge_check
2091 2095
2092 2096 # review status, must be always present
2093 2097 review_status = pull_request.calculated_review_status()
2094 2098 merge_check.review_status = review_status
2095 2099
2096 2100 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2097 2101 if not status_approved:
2098 2102 log.debug("MergeCheck: cannot merge, approval is pending.")
2099 2103
2100 2104 msg = _('Pull request reviewer approval is pending.')
2101 2105
2102 2106 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2103 2107
2104 2108 if fail_early:
2105 2109 return merge_check
2106 2110
2107 2111 # left over TODOs
2108 2112 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2109 2113 if todos:
2110 2114             log.debug("MergeCheck: cannot merge, %s "
2111 2115                       "unresolved TODOs left.", len(todos))
2112 2116
2113 2117 if len(todos) == 1:
2114 2118 msg = _('Cannot merge, {} TODO still not resolved.').format(
2115 2119 len(todos))
2116 2120 else:
2117 2121 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2118 2122 len(todos))
2119 2123
2120 2124 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2121 2125
2122 2126 if fail_early:
2123 2127 return merge_check
2124 2128
2125 2129 # merge possible, here is the filesystem simulation + shadow repo
2126 2130 merge_response, merge_status, msg = PullRequestModel().merge_status(
2127 2131 pull_request, translator=translator,
2128 2132 force_shadow_repo_refresh=force_shadow_repo_refresh)
2129 2133
2130 2134 merge_check.merge_possible = merge_status
2131 2135 merge_check.merge_msg = msg
2132 2136 merge_check.merge_response = merge_response
2133 2137
2134 2138 source_ref_id = pull_request.source_ref_parts.commit_id
2135 2139 target_ref_id = pull_request.target_ref_parts.commit_id
2136 2140
2137 2141 try:
2138 2142 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2139 2143 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2140 2144 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2141 2145 merge_check.source_commit.current_raw_id = source_commit.raw_id
2142 2146 merge_check.source_commit.previous_raw_id = source_ref_id
2143 2147
2144 2148 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2145 2149 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2146 2150 merge_check.target_commit.current_raw_id = target_commit.raw_id
2147 2151 merge_check.target_commit.previous_raw_id = target_ref_id
2148 2152 except (SourceRefMissing, TargetRefMissing):
2149 2153 pass
2150 2154
2151 2155 if not merge_status:
2152 2156 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2153 2157 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2154 2158
2155 2159 if fail_early:
2156 2160 return merge_check
2157 2161
2158 2162 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2159 2163 return merge_check
2160 2164
2161 2165 @classmethod
2162 2166 def get_merge_conditions(cls, pull_request, translator):
2163 2167 _ = translator
2164 2168 merge_details = {}
2165 2169
2166 2170 model = PullRequestModel()
2167 2171 use_rebase = model._use_rebase_for_merging(pull_request)
2168 2172
2169 2173 if use_rebase:
2170 2174 merge_details['merge_strategy'] = dict(
2171 2175 details={},
2172 2176 message=_('Merge strategy: rebase')
2173 2177 )
2174 2178 else:
2175 2179 merge_details['merge_strategy'] = dict(
2176 2180 details={},
2177 2181 message=_('Merge strategy: explicit merge commit')
2178 2182 )
2179 2183
2180 2184 close_branch = model._close_branch_before_merging(pull_request)
2181 2185 if close_branch:
2182 2186 repo_type = pull_request.target_repo.repo_type
2183 2187 close_msg = ''
2184 2188 if repo_type == 'hg':
2185 2189 close_msg = _('Source branch will be closed before the merge.')
2186 2190 elif repo_type == 'git':
2187 2191 close_msg = _('Source branch will be deleted after the merge.')
2188 2192
2189 2193 merge_details['close_branch'] = dict(
2190 2194 details={},
2191 2195 message=close_msg
2192 2196 )
2193 2197
2194 2198 return merge_details
2195 2199
2196 2200
2197 2201 ChangeTuple = collections.namedtuple(
2198 2202 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2199 2203
2200 2204 FileChangeTuple = collections.namedtuple(
2201 2205 'FileChangeTuple', ['added', 'modified', 'removed'])
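# Illustrative construction of the tuples above (hypothetical values):
#   commit_changes = ChangeTuple(
#       added=['abc1'], common=['abc0'], removed=[], total=['abc0', 'abc1'])
#   file_changes = FileChangeTuple(
#       added=['docs/new.rst'], modified=['setup.py'], removed=[])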