##// END OF EJS Templates
pull-requests: fix a potential crash when an invalid order-by column type is provided.
super-admin -
r4716:09a1d944 default
parent child Browse files
Show More
@@ -1,2378 +1,2379 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
class UpdateResponse(object):
    """
    Value object describing the outcome of updating the commits of a
    pull request: whether the update ran, why it stopped, and what
    changed between the old and new source/target state.
    """

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # NOTE: `commit_changes` is exposed under the attribute name `changes`
        self.__dict__.update(
            executed=executed,
            reason=reason,
            new=new,
            old=old,
            common_ancestor_id=common_ancestor_id,
            changes=commit_changes,
            source_changed=source_changed,
            target_changed=target_changed,
        )
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic.

    :param source_repo: source repository object (provides ``scm_instance()``)
    :param source_ref: commit id on the source side
    :param target_repo: target repository object (provides ``scm_instance()``)
    :param target_ref: commit id on the target side
    :param get_authors: also compute the original authors of the changed lines
        by annotating each modified file (expensive — off by default)
    :param get_commit_authors: collect the authors of the incoming commits
    :returns: dict with keys ``commits``, ``files``, ``stats``, ``ancestor``,
        ``original_authors`` and ``commit_authors``
    :raises ValueError: when the two refs share no common ancestor
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    all_files = []
    all_files_changes = []
    # per-file mapping: filename -> old line numbers that were deleted/modified
    changed_lines = {}
    # stats: [total lines added, total lines deleted] across all files
    stats = [0, 0]
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # cache of commit_id -> commit object, so annotation does not re-fetch
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # best-effort: a missing/unloadable node must not break the preview
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
236 236
237 237
class PullRequestModel(BaseModel):
    """Business logic for creating, querying and updating pull requests."""

    # SQLAlchemy model class this model operates on (used by BaseModel helpers)
    cls = PullRequest

    # default number of diff context lines
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # lazily-translated user-facing message for each UpdateFailureReason
    # returned by a pull request update attempt
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # reference types a pull request may point at
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # reference types that support updating a pull request ('tag' excluded)
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User, User.user_id == PullRequest.user_id)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 if order_by:
349 order_map = {
350 'name_raw': PullRequest.pull_request_id,
351 'id': PullRequest.pull_request_id,
352 'title': PullRequest.title,
353 'updated_on_raw': PullRequest.updated_on,
354 'target_repo': PullRequest.target_repo_id
355 }
348 order_map = {
349 'name_raw': PullRequest.pull_request_id,
350 'id': PullRequest.pull_request_id,
351 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
354 }
355 if order_by and order_by in order_map:
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 409 """
410 410 Count the number of pull requests for a specific repository that are
411 411 awaiting review.
412 412
413 413 :param repo_name: target or source repo
414 414 :param search_q: filter by text
415 415 :param statuses: list of pull request statuses
416 416 :returns: int number of pull requests
417 417 """
418 418 pull_requests = self.get_awaiting_review(
419 419 repo_name, search_q=search_q, statuses=statuses)
420 420
421 421 return len(pull_requests)
422 422
423 423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
424 424 offset=0, length=None, order_by=None, order_dir='desc'):
425 425 """
426 426 Get all pull requests for a specific repository that are awaiting
427 427 review.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param statuses: list of pull request statuses
432 432 :param offset: pagination offset
433 433 :param length: length of returned list
434 434 :param order_by: order of the returned list
435 435 :param order_dir: 'asc' or 'desc' ordering direction
436 436 :returns: list of pull requests
437 437 """
438 438 pull_requests = self.get_all(
439 439 repo_name, search_q=search_q, statuses=statuses,
440 440 order_by=order_by, order_dir=order_dir)
441 441
442 442 _filtered_pull_requests = []
443 443 for pr in pull_requests:
444 444 status = pr.calculated_review_status()
445 445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
446 446 ChangesetStatus.STATUS_UNDER_REVIEW]:
447 447 _filtered_pull_requests.append(pr)
448 448 if length:
449 449 return _filtered_pull_requests[offset:offset+length]
450 450 else:
451 451 return _filtered_pull_requests
452 452
453 453 def _prepare_awaiting_my_review_review_query(
454 454 self, repo_name, user_id, search_q=None, statuses=None,
455 455 order_by=None, order_dir='desc'):
456 456
457 457 for_review_statuses = [
458 458 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
459 459 ]
460 460
461 461 pull_request_alias = aliased(PullRequest)
462 462 status_alias = aliased(ChangesetStatus)
463 463 reviewers_alias = aliased(PullRequestReviewers)
464 464 repo_alias = aliased(Repository)
465 465
466 466 last_ver_subq = Session()\
467 467 .query(func.min(ChangesetStatus.version)) \
468 468 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
469 469 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
470 470 .subquery()
471 471
472 472 q = Session().query(pull_request_alias) \
473 473 .options(lazyload(pull_request_alias.author)) \
474 474 .join(reviewers_alias,
475 475 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
476 476 .join(repo_alias,
477 477 repo_alias.repo_id == pull_request_alias.target_repo_id) \
478 478 .outerjoin(status_alias,
479 479 and_(status_alias.user_id == reviewers_alias.user_id,
480 480 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
481 481 .filter(or_(status_alias.version == null(),
482 482 status_alias.version == last_ver_subq)) \
483 483 .filter(reviewers_alias.user_id == user_id) \
484 484 .filter(repo_alias.repo_name == repo_name) \
485 485 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
486 486 .group_by(pull_request_alias)
487 487
488 488 # closed,opened
489 489 if statuses:
490 490 q = q.filter(pull_request_alias.status.in_(statuses))
491 491
492 492 if search_q:
493 493 like_expression = u'%{}%'.format(safe_unicode(search_q))
494 494 q = q.join(User, User.user_id == pull_request_alias.user_id)
495 495 q = q.filter(or_(
496 496 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
497 497 User.username.ilike(like_expression),
498 498 pull_request_alias.title.ilike(like_expression),
499 499 pull_request_alias.description.ilike(like_expression),
500 500 ))
501 501
502 if order_by:
503 order_map = {
504 'name_raw': pull_request_alias.pull_request_id,
505 'title': pull_request_alias.title,
506 'updated_on_raw': pull_request_alias.updated_on,
507 'target_repo': pull_request_alias.target_repo_id
508 }
502 order_map = {
503 'name_raw': pull_request_alias.pull_request_id,
504 'title': pull_request_alias.title,
505 'updated_on_raw': pull_request_alias.updated_on,
506 'target_repo': pull_request_alias.target_repo_id
507 }
508 if order_by and order_by in order_map:
509 509 if order_dir == 'asc':
510 510 q = q.order_by(order_map[order_by].asc())
511 511 else:
512 512 q = q.order_by(order_map[order_by].desc())
513 513
514 514 return q
515 515
516 516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
517 517 """
518 518 Count the number of pull requests for a specific repository that are
519 519 awaiting review from a specific user.
520 520
521 521 :param repo_name: target or source repo
522 522 :param user_id: reviewer user of the pull request
523 523 :param search_q: filter by text
524 524 :param statuses: list of pull request statuses
525 525 :returns: int number of pull requests
526 526 """
527 527 q = self._prepare_awaiting_my_review_review_query(
528 528 repo_name, user_id, search_q=search_q, statuses=statuses)
529 529 return q.count()
530 530
531 531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
532 532 offset=0, length=None, order_by=None, order_dir='desc'):
533 533 """
534 534 Get all pull requests for a specific repository that are awaiting
535 535 review from a specific user.
536 536
537 537 :param repo_name: target or source repo
538 538 :param user_id: reviewer user of the pull request
539 539 :param search_q: filter by text
540 540 :param statuses: list of pull request statuses
541 541 :param offset: pagination offset
542 542 :param length: length of returned list
543 543 :param order_by: order of the returned list
544 544 :param order_dir: 'asc' or 'desc' ordering direction
545 545 :returns: list of pull requests
546 546 """
547 547
548 548 q = self._prepare_awaiting_my_review_review_query(
549 549 repo_name, user_id, search_q=search_q, statuses=statuses,
550 550 order_by=order_by, order_dir=order_dir)
551 551
552 552 if length:
553 553 pull_requests = q.limit(length).offset(offset).all()
554 554 else:
555 555 pull_requests = q.all()
556 556
557 557 return pull_requests
558 558
559 559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
560 560 order_by=None, order_dir='desc'):
561 561 """
562 562 return a query of pull-requests user is an creator, or he's added as a reviewer
563 563 """
564 564 q = PullRequest.query()
565 565 if user_id:
566 566 reviewers_subquery = Session().query(
567 567 PullRequestReviewers.pull_request_id).filter(
568 568 PullRequestReviewers.user_id == user_id).subquery()
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(reviewers_subquery)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_unicode(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 if order_by:
589 order_map = {
590 'name_raw': PullRequest.pull_request_id,
591 'title': PullRequest.title,
592 'updated_on_raw': PullRequest.updated_on,
593 'target_repo': PullRequest.target_repo_id
594 }
588
589 order_map = {
590 'name_raw': PullRequest.pull_request_id,
591 'title': PullRequest.title,
592 'updated_on_raw': PullRequest.updated_on,
593 'target_repo': PullRequest.target_repo_id
594 }
595 if order_by and order_by in order_map:
595 596 if order_dir == 'asc':
596 597 q = q.order_by(order_map[order_by].asc())
597 598 else:
598 599 q = q.order_by(order_map[order_by].desc())
599 600
600 601 return q
601 602
602 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 605 return q.count()
605 606
606 607 def get_im_participating_in(
607 608 self, user_id=None, statuses=None, query='', offset=0,
608 609 length=None, order_by=None, order_dir='desc'):
609 610 """
610 611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 612 """
612 613
613 614 q = self._prepare_im_participating_query(
614 615 user_id, statuses=statuses, query=query, order_by=order_by,
615 616 order_dir=order_dir)
616 617
617 618 if length:
618 619 pull_requests = q.limit(length).offset(offset).all()
619 620 else:
620 621 pull_requests = q.all()
621 622
622 623 return pull_requests
623 624
624 625 def _prepare_participating_in_for_review_query(
625 626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626 627
627 628 for_review_statuses = [
628 629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 630 ]
630 631
631 632 pull_request_alias = aliased(PullRequest)
632 633 status_alias = aliased(ChangesetStatus)
633 634 reviewers_alias = aliased(PullRequestReviewers)
634 635
635 636 last_ver_subq = Session()\
636 637 .query(func.min(ChangesetStatus.version)) \
637 638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 640 .subquery()
640 641
641 642 q = Session().query(pull_request_alias) \
642 643 .options(lazyload(pull_request_alias.author)) \
643 644 .join(reviewers_alias,
644 645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 646 .outerjoin(status_alias,
646 647 and_(status_alias.user_id == reviewers_alias.user_id,
647 648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 649 .filter(or_(status_alias.version == null(),
649 650 status_alias.version == last_ver_subq)) \
650 651 .filter(reviewers_alias.user_id == user_id) \
651 652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 653 .group_by(pull_request_alias)
653 654
654 655 # closed,opened
655 656 if statuses:
656 657 q = q.filter(pull_request_alias.status.in_(statuses))
657 658
658 659 if query:
659 660 like_expression = u'%{}%'.format(safe_unicode(query))
660 661 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 662 q = q.filter(or_(
662 663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 664 User.username.ilike(like_expression),
664 665 pull_request_alias.title.ilike(like_expression),
665 666 pull_request_alias.description.ilike(like_expression),
666 667 ))
667 668
668 if order_by:
669 order_map = {
670 'name_raw': pull_request_alias.pull_request_id,
671 'title': pull_request_alias.title,
672 'updated_on_raw': pull_request_alias.updated_on,
673 'target_repo': pull_request_alias.target_repo_id
674 }
669 order_map = {
670 'name_raw': pull_request_alias.pull_request_id,
671 'title': pull_request_alias.title,
672 'updated_on_raw': pull_request_alias.updated_on,
673 'target_repo': pull_request_alias.target_repo_id
674 }
675 if order_by and order_by in order_map:
675 676 if order_dir == 'asc':
676 677 q = q.order_by(order_map[order_by].asc())
677 678 else:
678 679 q = q.order_by(order_map[order_by].desc())
679 680
680 681 return q
681 682
682 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 685 return q.count()
685 686
686 687 def get_im_participating_in_for_review(
687 688 self, user_id, statuses=None, query='', offset=0,
688 689 length=None, order_by=None, order_dir='desc'):
689 690 """
690 691 Get all Pull requests that needs user approval or rejection
691 692 """
692 693
693 694 q = self._prepare_participating_in_for_review_query(
694 695 user_id, statuses=statuses, query=query, order_by=order_by,
695 696 order_dir=order_dir)
696 697
697 698 if length:
698 699 pull_requests = q.limit(length).offset(offset).all()
699 700 else:
700 701 pull_requests = q.all()
701 702
702 703 return pull_requests
703 704
    def get_versions(self, pull_request):
        """
        Return all versions of the given pull request, ordered by version id
        ascending (oldest version first).

        NOTE(review): the previous docstring claimed descending order, but
        the query sorts by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
712 713
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request, optionally at a specific version.

        :param pull_request_id: id of the pull request
        :param version: a PullRequestVersion id, the string 'latest', or None
            for the current (unversioned) pull request
        :returns: tuple of (original pull request, object at the requested
            version, display wrapper object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' resolves to the live pull request itself
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete, stored version of the pull request (404 if missing)
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            # no version given: the current pull request (404 if missing)
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
735 736
736 737 def pr_commits_versions(self, versions):
737 738 """
738 739 Maps the pull-request commits into all known PR versions. This way we can obtain
739 740 each pr version the commit was introduced in.
740 741 """
741 742 commit_versions = collections.defaultdict(list)
742 743 num_versions = [x.pull_request_version_id for x in versions]
743 744 for ver in versions:
744 745 for commit_id in ver.revisions:
745 746 ver_idx = ChangesetComment.get_index_from_version(
746 747 ver.pull_request_version_id, num_versions=num_versions)
747 748 commit_versions[commit_id].append(ver_idx)
748 749 return commit_versions
749 750
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with its reviewers and observers, run an
        initial merge simulation, and fire the 'create' hook.

        :param created_by: user (object/id/username) creating the pull request
        :param source_repo: source repository (object/id/name)
        :param source_ref: source reference string, e.g. ``branch:name:commit_id``
        :param target_repo: target repository (object/id/name)
        :param target_ref: target reference string
        :param revisions: list of commit ids the pull request contains
        :param reviewers: list of tuples (user_id, reasons, mandatory, role, rules)
        :param observers: list of tuples, same shape as ``reviewers``
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: pre-computed common ancestor commit id
        :param description_renderer: renderer used for the description text
        :param reviewer_data: rule data from default-reviewers computation
        :param translator: optional translation callable
        :param auth_user: acting AuthUser; defaults to the creator's AuthUser
        :return: the newly created ``PullRequest``
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush so the pull request gets an id that reviewers can reference
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
891 892
892 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 894 pull_request = self.__get_pull_request(pull_request)
894 895 target_scm = pull_request.target_repo.scm_instance()
895 896 if action == 'create':
896 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 898 elif action == 'merge':
898 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 900 elif action == 'close':
900 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 902 elif action == 'review_status_change':
902 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 904 elif action == 'update':
904 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 906 elif action == 'comment':
906 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 908 elif action == 'comment_edit':
908 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 910 else:
910 911 return
911 912
912 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 914 pull_request, action, trigger_hook)
914 915 trigger_hook(
915 916 username=user.username,
916 917 repo_name=pull_request.target_repo.repo_name,
917 918 repo_type=target_scm.alias,
918 919 pull_request=pull_request,
919 920 data=data)
920 921
921 922 def _get_commit_ids(self, pull_request):
922 923 """
923 924 Return the commit ids of the merged pull request.
924 925
925 926 This method is not dealing correctly yet with the lack of autoupdates
926 927 nor with the implicit target updates.
927 928 For example: if a commit in the source repo is already in the target it
928 929 will be reported anyways.
929 930 """
930 931 merge_rev = pull_request.merge_rev
931 932 if merge_rev is None:
932 933 raise ValueError('This pull request was not merged yet')
933 934
934 935 commit_ids = list(pull_request.revisions)
935 936 if merge_rev not in commit_ids:
936 937 commit_ids.append(merge_rev)
937 938
938 939 return commit_ids
939 940
940 941 def merge_repo(self, pull_request, user, extras):
941 942 log.debug("Merging pull request %s", pull_request.pull_request_id)
942 943 extras['user_agent'] = 'internal-merge'
943 944 merge_state = self._merge_pull_request(pull_request, user, extras)
944 945 if merge_state.executed:
945 946 log.debug("Merge was successful, updating the pull request comments.")
946 947 self._comment_and_close_pr(pull_request, user, merge_state)
947 948
948 949 self._log_audit_action(
949 950 'repo.pull_request.merge',
950 951 {'merge_state': merge_state.__dict__},
951 952 user, pull_request)
952 953
953 954 else:
954 955 log.warn("Merge failed, not updating the pull request.")
955 956 return merge_state
956 957
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Run the vcs-level merge of the pull request in a workspace of the
        target repository, with the hook callback daemon active so commit
        hooks fire during the merge.

        :param pull_request: the ``PullRequest`` to merge
        :param user: user performing the merge (name/email go into the commit)
        :param extras: hook extras dict; updated by the callback daemon setup
        :param merge_msg: optional commit-message template; defaults to
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :return: merge state object returned by ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # build the merge commit message from the configured template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target reference points at the current tip
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
997 998
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge commit on the pull request,
        leave a closing comment, invalidate target repo caches and fire the
        'merge' hook.

        :param pull_request: the merged ``PullRequest``
        :param user: user who performed the merge
        :param merge_state: merge result; its ``merge_ref.commit_id`` is stored
        :param close_msg: optional closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
1019 1020
1020 1021 def has_valid_update_type(self, pull_request):
1021 1022 source_ref_type = pull_request.source_ref_parts.type
1022 1023 return source_ref_type in self.REF_TYPES
1023 1024
1024 1025 def get_flow_commits(self, pull_request):
1025 1026
1026 1027 # source repo
1027 1028 source_ref_name = pull_request.source_ref_parts.name
1028 1029 source_ref_type = pull_request.source_ref_parts.type
1029 1030 source_ref_id = pull_request.source_ref_parts.commit_id
1030 1031 source_repo = pull_request.source_repo.scm_instance()
1031 1032
1032 1033 try:
1033 1034 if source_ref_type in self.REF_TYPES:
1034 1035 source_commit = source_repo.get_commit(
1035 1036 source_ref_name, reference_obj=pull_request.source_ref_parts)
1036 1037 else:
1037 1038 source_commit = source_repo.get_commit(source_ref_id)
1038 1039 except CommitDoesNotExistError:
1039 1040 raise SourceRefMissing()
1040 1041
1041 1042 # target repo
1042 1043 target_ref_name = pull_request.target_ref_parts.name
1043 1044 target_ref_type = pull_request.target_ref_parts.type
1044 1045 target_ref_id = pull_request.target_ref_parts.commit_id
1045 1046 target_repo = pull_request.target_repo.scm_instance()
1046 1047
1047 1048 try:
1048 1049 if target_ref_type in self.REF_TYPES:
1049 1050 target_commit = target_repo.get_commit(
1050 1051 target_ref_name, reference_obj=pull_request.target_ref_parts)
1051 1052 else:
1052 1053 target_commit = target_repo.get_commit(target_ref_id)
1053 1054 except CommitDoesNotExistError:
1054 1055 raise TargetRefMissing()
1055 1056
1056 1057 return source_commit, target_commit
1057 1058
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update

        :return: ``UpdateResponse`` with ``executed`` flag, failure reason,
            the old/new pull request objects and the computed commit changes
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # a side changed when its stored ref id differs from the resolved tip
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): source_changed/target_changed look swapped in this
            # call, but both are False in this branch, so it has no effect —
            # confirm before reordering.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                # notification failure must not undo the update itself;
                # the update was already committed above
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1238 1239
1239 1240 def _create_version_from_snapshot(self, pull_request):
1240 1241 version = PullRequestVersion()
1241 1242 version.title = pull_request.title
1242 1243 version.description = pull_request.description
1243 1244 version.status = pull_request.status
1244 1245 version.pull_request_state = pull_request.pull_request_state
1245 1246 version.created_on = datetime.datetime.now()
1246 1247 version.updated_on = pull_request.updated_on
1247 1248 version.user_id = pull_request.user_id
1248 1249 version.source_repo = pull_request.source_repo
1249 1250 version.source_ref = pull_request.source_ref
1250 1251 version.target_repo = pull_request.target_repo
1251 1252 version.target_ref = pull_request.target_ref
1252 1253
1253 1254 version._last_merge_source_rev = pull_request._last_merge_source_rev
1254 1255 version._last_merge_target_rev = pull_request._last_merge_target_rev
1255 1256 version.last_merge_status = pull_request.last_merge_status
1256 1257 version.last_merge_metadata = pull_request.last_merge_metadata
1257 1258 version.shadow_merge_ref = pull_request.shadow_merge_ref
1258 1259 version.merge_rev = pull_request.merge_rev
1259 1260 version.reviewer_data = pull_request.reviewer_data
1260 1261
1261 1262 version.revisions = pull_request.revisions
1262 1263 version.common_ancestor_id = pull_request.common_ancestor_id
1263 1264 version.pull_request = pull_request
1264 1265 Session().add(version)
1265 1266 Session().flush()
1266 1267
1267 1268 return version
1268 1269
1269 1270 def _generate_update_diffs(self, pull_request, pull_request_version):
1270 1271
1271 1272 diff_context = (
1272 1273 self.DIFF_CONTEXT +
1273 1274 CommentsModel.needed_extra_diff_context())
1274 1275 hide_whitespace_changes = False
1275 1276 source_repo = pull_request_version.source_repo
1276 1277 source_ref_id = pull_request_version.source_ref_parts.commit_id
1277 1278 target_ref_id = pull_request_version.target_ref_parts.commit_id
1278 1279 old_diff = self._get_diff_from_pr_or_version(
1279 1280 source_repo, source_ref_id, target_ref_id,
1280 1281 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1281 1282
1282 1283 source_repo = pull_request.source_repo
1283 1284 source_ref_id = pull_request.source_ref_parts.commit_id
1284 1285 target_ref_id = pull_request.target_ref_parts.commit_id
1285 1286
1286 1287 new_diff = self._get_diff_from_pr_or_version(
1287 1288 source_repo, source_ref_id, target_ref_id,
1288 1289 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1289 1290
1290 1291 old_diff_data = diffs.DiffProcessor(old_diff)
1291 1292 old_diff_data.prepare()
1292 1293 new_diff_data = diffs.DiffProcessor(new_diff)
1293 1294 new_diff_data.prepare()
1294 1295
1295 1296 return old_diff_data, new_diff_data
1296 1297
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # comments with pull_request_version == None are the not-yet-linked ones
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1322 1323
1323 1324 def _calculate_commit_id_changes(self, old_ids, new_ids):
1324 1325 added = [x for x in new_ids if x not in old_ids]
1325 1326 common = [x for x in new_ids if x in old_ids]
1326 1327 removed = [x for x in old_ids if x not in new_ids]
1327 1328 total = new_ids
1328 1329 return ChangeTuple(added, common, removed, total)
1329 1330
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diff results and classify every file as added,
        modified or removed between the old and the new diff.

        :param old_diff_data: prepared DiffProcessor of the previous version
        :param new_diff_data: prepared DiffProcessor of the current state
        :return: ``FileChangeTuple(added, modified, removed)``
        """
        # filename -> md5 of its raw diff, for the OLD diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            # compare per-file diffs by hash to detect content changes
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1364 1365
1365 1366 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1366 1367 """
1367 1368 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1368 1369 so it's always looking the same disregarding on which default
1369 1370 renderer system is using.
1370 1371
1371 1372 :param ancestor_commit_id: ancestor raw_id
1372 1373 :param changes: changes named tuple
1373 1374 :param file_changes: file changes named tuple
1374 1375
1375 1376 """
1376 1377 new_status = ChangesetStatus.get_status_lbl(
1377 1378 ChangesetStatus.STATUS_UNDER_REVIEW)
1378 1379
1379 1380 changed_files = (
1380 1381 file_changes.added + file_changes.modified + file_changes.removed)
1381 1382
1382 1383 params = {
1383 1384 'under_review_label': new_status,
1384 1385 'added_commits': changes.added,
1385 1386 'removed_commits': changes.removed,
1386 1387 'changed_files': changed_files,
1387 1388 'added_files': file_changes.added,
1388 1389 'modified_files': file_changes.modified,
1389 1390 'removed_files': file_changes.removed,
1390 1391 'ancestor_commit_id': ancestor_commit_id
1391 1392 }
1392 1393 renderer = RstTemplateRenderer()
1393 1394 return renderer.render('pull_request_update.mako', **params)
1394 1395
1395 1396 def edit(self, pull_request, title, description, description_renderer, user):
1396 1397 pull_request = self.__get_pull_request(pull_request)
1397 1398 old_data = pull_request.get_api_data(with_merge_state=False)
1398 1399 if pull_request.is_closed():
1399 1400 raise ValueError('This pull request is closed')
1400 1401 if title:
1401 1402 pull_request.title = title
1402 1403 pull_request.description = description
1403 1404 pull_request.updated_on = datetime.datetime.now()
1404 1405 pull_request.description_renderer = description_renderer
1405 1406 Session().add(pull_request)
1406 1407 self._log_audit_action(
1407 1408 'repo.pull_request.edit', {'old_data': old_data},
1408 1409 user, pull_request)
1409 1410
1410 1411 def update_reviewers(self, pull_request, reviewer_data, user):
1411 1412 """
1412 1413 Update the reviewers in the pull request
1413 1414
1414 1415 :param pull_request: the pr to update
1415 1416 :param reviewer_data: list of tuples
1416 1417 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1417 1418 :param user: current use who triggers this action
1418 1419 """
1419 1420
1420 1421 pull_request = self.__get_pull_request(pull_request)
1421 1422 if pull_request.is_closed():
1422 1423 raise ValueError('This pull request is closed')
1423 1424
1424 1425 reviewers = {}
1425 1426 for user_id, reasons, mandatory, role, rules in reviewer_data:
1426 1427 if isinstance(user_id, (int, compat.string_types)):
1427 1428 user_id = self._get_user(user_id).user_id
1428 1429 reviewers[user_id] = {
1429 1430 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1430 1431
1431 1432 reviewers_ids = set(reviewers.keys())
1432 1433 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1433 1434 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1434 1435
1435 1436 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1436 1437
1437 1438 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1438 1439 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1439 1440
1440 1441 log.debug("Adding %s reviewers", ids_to_add)
1441 1442 log.debug("Removing %s reviewers", ids_to_remove)
1442 1443 changed = False
1443 1444 added_audit_reviewers = []
1444 1445 removed_audit_reviewers = []
1445 1446
1446 1447 for uid in ids_to_add:
1447 1448 changed = True
1448 1449 _usr = self._get_user(uid)
1449 1450 reviewer = PullRequestReviewers()
1450 1451 reviewer.user = _usr
1451 1452 reviewer.pull_request = pull_request
1452 1453 reviewer.reasons = reviewers[uid]['reasons']
1453 1454 # NOTE(marcink): mandatory shouldn't be changed now
1454 1455 # reviewer.mandatory = reviewers[uid]['reasons']
1455 1456 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1456 1457 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1457 1458 Session().add(reviewer)
1458 1459 added_audit_reviewers.append(reviewer.get_dict())
1459 1460
1460 1461 for uid in ids_to_remove:
1461 1462 changed = True
1462 1463 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1463 1464 # This is an edge case that handles previous state of having the same reviewer twice.
1464 1465 # this CAN happen due to the lack of DB checks
1465 1466 reviewers = PullRequestReviewers.query()\
1466 1467 .filter(PullRequestReviewers.user_id == uid,
1467 1468 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1468 1469 PullRequestReviewers.pull_request == pull_request)\
1469 1470 .all()
1470 1471
1471 1472 for obj in reviewers:
1472 1473 added_audit_reviewers.append(obj.get_dict())
1473 1474 Session().delete(obj)
1474 1475
1475 1476 if changed:
1476 1477 Session().expire_all()
1477 1478 pull_request.updated_on = datetime.datetime.now()
1478 1479 Session().add(pull_request)
1479 1480
1480 1481 # finally store audit logs
1481 1482 for user_data in added_audit_reviewers:
1482 1483 self._log_audit_action(
1483 1484 'repo.pull_request.reviewer.add', {'data': user_data},
1484 1485 user, pull_request)
1485 1486 for user_data in removed_audit_reviewers:
1486 1487 self._log_audit_action(
1487 1488 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1488 1489 user, pull_request)
1489 1490
1490 1491 self.notify_reviewers(pull_request, ids_to_add, user)
1491 1492 return ids_to_add, ids_to_remove
1492 1493
1493 1494 def update_observers(self, pull_request, observer_data, user):
1494 1495 """
1495 1496 Update the observers in the pull request
1496 1497
1497 1498 :param pull_request: the pr to update
1498 1499 :param observer_data: list of tuples
1499 1500 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1500 1501 :param user: current use who triggers this action
1501 1502 """
1502 1503 pull_request = self.__get_pull_request(pull_request)
1503 1504 if pull_request.is_closed():
1504 1505 raise ValueError('This pull request is closed')
1505 1506
1506 1507 observers = {}
1507 1508 for user_id, reasons, mandatory, role, rules in observer_data:
1508 1509 if isinstance(user_id, (int, compat.string_types)):
1509 1510 user_id = self._get_user(user_id).user_id
1510 1511 observers[user_id] = {
1511 1512 'reasons': reasons, 'observers': mandatory, 'role': role}
1512 1513
1513 1514 observers_ids = set(observers.keys())
1514 1515 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1515 1516 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1516 1517
1517 1518 current_observers_ids = set([x.user.user_id for x in current_observers])
1518 1519
1519 1520 ids_to_add = observers_ids.difference(current_observers_ids)
1520 1521 ids_to_remove = current_observers_ids.difference(observers_ids)
1521 1522
1522 1523 log.debug("Adding %s observer", ids_to_add)
1523 1524 log.debug("Removing %s observer", ids_to_remove)
1524 1525 changed = False
1525 1526 added_audit_observers = []
1526 1527 removed_audit_observers = []
1527 1528
1528 1529 for uid in ids_to_add:
1529 1530 changed = True
1530 1531 _usr = self._get_user(uid)
1531 1532 observer = PullRequestReviewers()
1532 1533 observer.user = _usr
1533 1534 observer.pull_request = pull_request
1534 1535 observer.reasons = observers[uid]['reasons']
1535 1536 # NOTE(marcink): mandatory shouldn't be changed now
1536 1537 # observer.mandatory = observer[uid]['reasons']
1537 1538
1538 1539 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1539 1540 observer.role = PullRequestReviewers.ROLE_OBSERVER
1540 1541 Session().add(observer)
1541 1542 added_audit_observers.append(observer.get_dict())
1542 1543
1543 1544 for uid in ids_to_remove:
1544 1545 changed = True
1545 1546 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1546 1547 # This is an edge case that handles previous state of having the same reviewer twice.
1547 1548 # this CAN happen due to the lack of DB checks
1548 1549 observers = PullRequestReviewers.query()\
1549 1550 .filter(PullRequestReviewers.user_id == uid,
1550 1551 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1551 1552 PullRequestReviewers.pull_request == pull_request)\
1552 1553 .all()
1553 1554
1554 1555 for obj in observers:
1555 1556 added_audit_observers.append(obj.get_dict())
1556 1557 Session().delete(obj)
1557 1558
1558 1559 if changed:
1559 1560 Session().expire_all()
1560 1561 pull_request.updated_on = datetime.datetime.now()
1561 1562 Session().add(pull_request)
1562 1563
1563 1564 # finally store audit logs
1564 1565 for user_data in added_audit_observers:
1565 1566 self._log_audit_action(
1566 1567 'repo.pull_request.observer.add', {'data': user_data},
1567 1568 user, pull_request)
1568 1569 for user_data in removed_audit_observers:
1569 1570 self._log_audit_action(
1570 1571 'repo.pull_request.observer.delete', {'old_data': user_data},
1571 1572 user, pull_request)
1572 1573
1573 1574 self.notify_observers(pull_request, ids_to_add, user)
1574 1575 return ids_to_add, ids_to_remove
1575 1576
1576 1577 def get_url(self, pull_request, request=None, permalink=False):
1577 1578 if not request:
1578 1579 request = get_current_request()
1579 1580
1580 1581 if permalink:
1581 1582 return request.route_url(
1582 1583 'pull_requests_global',
1583 1584 pull_request_id=pull_request.pull_request_id,)
1584 1585 else:
1585 1586 return request.route_url('pullrequest_show',
1586 1587 repo_name=safe_str(pull_request.target_repo.repo_name),
1587 1588 pull_request_id=pull_request.pull_request_id,)
1588 1589
1589 1590 def get_shadow_clone_url(self, pull_request, request=None):
1590 1591 """
1591 1592 Returns qualified url pointing to the shadow repository. If this pull
1592 1593 request is closed there is no shadow repository and ``None`` will be
1593 1594 returned.
1594 1595 """
1595 1596 if pull_request.is_closed():
1596 1597 return None
1597 1598 else:
1598 1599 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1599 1600 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1600 1601
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Create in-app notifications and emails for the given reviewer or
        observer user ids about this pull request.

        :param pull_request: the pull request the notification is about
        :param user_ids: iterable of recipient user ids; no-op when empty
        :param role: PullRequestReviewers role constant, passed to the email
            template as ``user_role``
        :param user: the user performing the action (notification author)
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # the PR lives under the target repo, hence the target repo_name
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: (raw_id, message) pairs of all PR commits
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        # keys below form the contract with the email templates — do not rename
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            # used for email threading; one thread per pull request URL
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1659 1660
1660 1661 def notify_reviewers(self, pull_request, reviewers_ids, user):
1661 1662 return self._notify_reviewers(pull_request, reviewers_ids,
1662 1663 PullRequestReviewers.ROLE_REVIEWER, user)
1663 1664
1664 1665 def notify_observers(self, pull_request, observers_ids, user):
1665 1666 return self._notify_reviewers(pull_request, observers_ids,
1666 1667 PullRequestReviewers.ROLE_OBSERVER, user)
1667 1668
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Notify all pull-request reviewers (except the updating user) that the
        pull request was updated, including the commit and file deltas.

        :param pull_request: the updated pull request
        :param updating_user: user who performed the update; excluded from
            the recipients
        :param ancestor_commit_id: common ancestor commit id after the update
        :param commit_changes: object with ``added``/``removed`` commit lists
        :param file_changes: object with ``added``/``modified``/``removed``
            file lists
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # keys below form the contract with the update-email template
        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            # used for email threading; same thread as the original PR email
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1732 1733
    def delete(self, pull_request, user=None):
        """
        Delete the given pull request.

        Cleans up the shadow merge workspace and stores an audit-log entry
        carrying the PR's old data before the row is removed.

        :param pull_request: pull request object or id
        :param user: acting user for the audit log; defaults to the current
            authenticated user's username
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # snapshot the data before deletion so the audit log keeps it
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1744 1745
    def close_pull_request(self, pull_request, user):
        """
        Mark the pull request as closed.

        Removes the shadow merge workspace, sets STATUS_CLOSED, fires the
        'close' pull-request hook and writes an audit-log entry.

        :param pull_request: pull request object or id
        :param user: acting user for the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        # audit entry carries the final (closed) state of the PR
        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1756 1757
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request together with a status-change comment.

        The closing status is APPROVED only when the calculated review status
        already is approved; otherwise the close is recorded as REJECTED.

        :param pull_request: the pull request to close
        :param user: user performing the close (comment author and vote)
        :param repo: repository the comment is stored against
        :param message: optional custom comment text; a default status-change
            message is used when not given
        :param auth_user: authenticated user passed through to comment creation
        :returns: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the new status is visible to the recalculation below
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1814 1815
1815 1816 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1816 1817 _ = translator or get_current_request().translate
1817 1818
1818 1819 if not self._is_merge_enabled(pull_request):
1819 1820 return None, False, _('Server-side pull request merging is disabled.')
1820 1821
1821 1822 if pull_request.is_closed():
1822 1823 return None, False, _('This pull request is closed.')
1823 1824
1824 1825 merge_possible, msg = self._check_repo_requirements(
1825 1826 target=pull_request.target_repo, source=pull_request.source_repo,
1826 1827 translator=_)
1827 1828 if not merge_possible:
1828 1829 return None, merge_possible, msg
1829 1830
1830 1831 try:
1831 1832 merge_response = self._try_merge(
1832 1833 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1833 1834 log.debug("Merge response: %s", merge_response)
1834 1835 return merge_response, merge_response.possible, merge_response.merge_status_message
1835 1836 except NotImplementedError:
1836 1837 return None, False, _('Pull request merging is not supported.')
1837 1838
1838 1839 def _check_repo_requirements(self, target, source, translator):
1839 1840 """
1840 1841 Check if `target` and `source` have compatible requirements.
1841 1842
1842 1843 Currently this is just checking for largefiles.
1843 1844 """
1844 1845 _ = translator
1845 1846 target_has_largefiles = self._has_largefiles(target)
1846 1847 source_has_largefiles = self._has_largefiles(source)
1847 1848 merge_possible = True
1848 1849 message = u''
1849 1850
1850 1851 if target_has_largefiles != source_has_largefiles:
1851 1852 merge_possible = False
1852 1853 if source_has_largefiles:
1853 1854 message = _(
1854 1855 'Target repository large files support is disabled.')
1855 1856 else:
1856 1857 message = _(
1857 1858 'Source repository large files support is disabled.')
1858 1859
1859 1860 return merge_possible, message
1860 1861
1861 1862 def _has_largefiles(self, repo):
1862 1863 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1863 1864 'extensions', 'largefiles')
1864 1865 return largefiles_ui and largefiles_ui[0].active
1865 1866
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Performs a dry-run merge in the shadow repository when the cached
        merge state is stale (or when `force_shadow_repo_refresh` is set);
        otherwise a MergeResponse is rebuilt from the cached state on the
        pull request.

        :returns: a MergeResponse describing whether the merge is possible
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) -> cannot merge
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            # source or target moved since the last check -> re-run dry merge
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid; rebuild the response from it
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1919 1920
1920 1921 def _refresh_reference(self, reference, vcs_repository):
1921 1922 if reference.type in self.UPDATABLE_REF_TYPES:
1922 1923 name_or_id = reference.name
1923 1924 else:
1924 1925 name_or_id = reference.commit_id
1925 1926
1926 1927 refreshed_commit = vcs_repository.get_commit(name_or_id)
1927 1928 refreshed_reference = Reference(
1928 1929 reference.type, reference.name, refreshed_commit.raw_id)
1929 1930 return refreshed_reference
1930 1931
1931 1932 def _needs_merge_state_refresh(self, pull_request, target_reference):
1932 1933 return not(
1933 1934 pull_request.revisions and
1934 1935 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1935 1936 target_reference.commit_id == pull_request._last_merge_target_rev)
1936 1937
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and cache the outcome
        on the pull request (last merge revs, status, metadata, shadow ref).

        :returns: the MergeResponse from the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # remember which revisions this state was computed for, so
            # _needs_merge_state_refresh can detect staleness later
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1962 1963
1963 1964 def _workspace_id(self, pull_request):
1964 1965 workspace_id = 'pr-%s' % pull_request.pull_request_id
1965 1966 return workspace_id
1966 1967
1967 1968 def generate_repo_data(self, repo, commit_id=None, branch=None,
1968 1969 bookmark=None, translator=None):
1969 1970 from rhodecode.model.repo import RepoModel
1970 1971
1971 1972 all_refs, selected_ref = \
1972 1973 self._get_repo_pullrequest_sources(
1973 1974 repo.scm_instance(), commit_id=commit_id,
1974 1975 branch=branch, bookmark=bookmark, translator=translator)
1975 1976
1976 1977 refs_select2 = []
1977 1978 for element in all_refs:
1978 1979 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1979 1980 refs_select2.append({'text': element[1], 'children': children})
1980 1981
1981 1982 return {
1982 1983 'user': {
1983 1984 'user_id': repo.user.user_id,
1984 1985 'username': repo.user.username,
1985 1986 'firstname': repo.user.first_name,
1986 1987 'lastname': repo.user.last_name,
1987 1988 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1988 1989 },
1989 1990 'name': repo.repo_name,
1990 1991 'link': RepoModel().get_url(repo),
1991 1992 'description': h.chop_at_smart(repo.description_safe, '\n'),
1992 1993 'refs': {
1993 1994 'all_refs': all_refs,
1994 1995 'selected_ref': selected_ref,
1995 1996 'select2_refs': refs_select2
1996 1997 }
1997 1998 }
1998 1999
1999 2000 def generate_pullrequest_title(self, source, source_ref, target):
2000 2001 return u'{source}#{at_ref} to {target}'.format(
2001 2002 source=source,
2002 2003 at_ref=source_ref,
2003 2004 target=target,
2004 2005 )
2005 2006
2006 2007 def _cleanup_merge_workspace(self, pull_request):
2007 2008 # Merging related cleanup
2008 2009 repo_id = pull_request.target_repo.repo_id
2009 2010 target_scm = pull_request.target_repo.scm_instance()
2010 2011 workspace_id = self._workspace_id(pull_request)
2011 2012
2012 2013 try:
2013 2014 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2014 2015 except NotImplementedError:
2015 2016 pass
2016 2017
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :param translator: optional translation function; taken from the
            current request when not given
        :returns: tuple of (groups, selected) where groups is a list of
            ([(ref_key, ref_name), ...], group_label) tuples and selected is
            the pre-selected ``type:name:commit_id`` key or None
        :raises CommitDoesNotExistError: when an explicit ref was requested
            but not found
        :raises EmptyRepositoryError: when the repository has no commits
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # keys are in the ``type:name:commit_id`` format
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching the requested
                    # commit id / branch / bookmark
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was asked for but never matched above
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # fall back to the default branch when nothing was requested
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
2082 2083
2083 2084 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2084 2085 hide_whitespace_changes, diff_context):
2085 2086
2086 2087 return self._get_diff_from_pr_or_version(
2087 2088 source_repo, source_ref_id, target_ref_id,
2088 2089 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2089 2090
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the diff between `target_ref_id` and `source_ref_id` within
        `source_repo` (a DB Repository or an already-resolved scm instance).

        :param hide_whitespace_changes: passed as ignore_whitespace to the diff
        :param diff_context: number of context lines around each hunk
        :returns: the vcs diff object
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        # maybe_unreachable: after a PR update the old source commit may no
        # longer be reachable from any ref
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
2120 2121
2121 2122 def _is_merge_enabled(self, pull_request):
2122 2123 return self._get_general_setting(
2123 2124 pull_request, 'rhodecode_pr_merge_enabled')
2124 2125
2125 2126 def _use_rebase_for_merging(self, pull_request):
2126 2127 repo_type = pull_request.target_repo.repo_type
2127 2128 if repo_type == 'hg':
2128 2129 return self._get_general_setting(
2129 2130 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2130 2131 elif repo_type == 'git':
2131 2132 return self._get_general_setting(
2132 2133 pull_request, 'rhodecode_git_use_rebase_for_merging')
2133 2134
2134 2135 return False
2135 2136
2136 2137 def _user_name_for_merging(self, pull_request, user):
2137 2138 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2138 2139 if env_user_name_attr and hasattr(user, env_user_name_attr):
2139 2140 user_name_attr = env_user_name_attr
2140 2141 else:
2141 2142 user_name_attr = 'short_contact'
2142 2143
2143 2144 user_name = getattr(user, user_name_attr)
2144 2145 return user_name
2145 2146
2146 2147 def _close_branch_before_merging(self, pull_request):
2147 2148 repo_type = pull_request.target_repo.repo_type
2148 2149 if repo_type == 'hg':
2149 2150 return self._get_general_setting(
2150 2151 pull_request, 'rhodecode_hg_close_branch_before_merging')
2151 2152 elif repo_type == 'git':
2152 2153 return self._get_general_setting(
2153 2154 pull_request, 'rhodecode_git_close_branch_before_merging')
2154 2155
2155 2156 return False
2156 2157
2157 2158 def _get_general_setting(self, pull_request, settings_key, default=False):
2158 2159 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2159 2160 settings = settings_model.get_general_settings()
2160 2161 return settings.get(settings_key, default)
2161 2162
2162 2163 def _log_audit_action(self, action, action_data, user, pull_request):
2163 2164 audit_logger.store(
2164 2165 action=action,
2165 2166 action_data=action_data,
2166 2167 user=user,
2167 2168 repo=pull_request.target_repo)
2168 2169
2169 2170 def get_reviewer_functions(self):
2170 2171 """
2171 2172 Fetches functions for validation and fetching default reviewers.
2172 2173 If available we use the EE package, else we fallback to CE
2173 2174 package functions
2174 2175 """
2175 2176 try:
2176 2177 from rc_reviewers.utils import get_default_reviewers_data
2177 2178 from rc_reviewers.utils import validate_default_reviewers
2178 2179 from rc_reviewers.utils import validate_observers
2179 2180 except ImportError:
2180 2181 from rhodecode.apps.repository.utils import get_default_reviewers_data
2181 2182 from rhodecode.apps.repository.utils import validate_default_reviewers
2182 2183 from rhodecode.apps.repository.utils import validate_observers
2183 2184
2184 2185 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2185 2186
2186 2187
2187 2188 class MergeCheck(object):
2188 2189 """
2189 2190 Perform Merge Checks and returns a check object which stores information
2190 2191 about merge errors, and merge conditions
2191 2192 """
2192 2193 TODO_CHECK = 'todo'
2193 2194 PERM_CHECK = 'perm'
2194 2195 REVIEW_CHECK = 'review'
2195 2196 MERGE_CHECK = 'merge'
2196 2197 WIP_CHECK = 'wip'
2197 2198
    def __init__(self):
        """Initialize an empty merge-check result container."""
        # calculated review status of the PR (ChangesetStatus value)
        self.review_status = None
        # whether the dry-run merge succeeded
        self.merge_possible = None
        # human-readable merge status message
        self.merge_msg = ''
        # raw MergeResponse from the merge simulation, if any
        self.merge_response = None
        # set to True by push_error() once any check fails
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # per-check details keyed by the *_CHECK constants
        self.error_details = OrderedDict()
        # source/target commit change info filled in during validate()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0
2210 2211
2211 2212 def __repr__(self):
2212 2213 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2213 2214 self.merge_possible, self.failed, self.errors)
2214 2215
2215 2216 def push_error(self, error_type, message, error_key, details):
2216 2217 self.failed = True
2217 2218 self.errors.append([error_type, message])
2218 2219 self.error_details[error_key] = dict(
2219 2220 details=details,
2220 2221 error_type=error_type,
2221 2222 message=message
2222 2223 )
2223 2224
2224 2225 @classmethod
2225 2226 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2226 2227 force_shadow_repo_refresh=False):
2227 2228 _ = translator
2228 2229 merge_check = cls()
2229 2230
2230 2231 # title has WIP:
2231 2232 if pull_request.work_in_progress:
2232 2233 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2233 2234
2234 2235 msg = _('WIP marker in title prevents from accidental merge.')
2235 2236 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2236 2237 if fail_early:
2237 2238 return merge_check
2238 2239
2239 2240 # permissions to merge
2240 2241 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2241 2242 if not user_allowed_to_merge:
2242 2243 log.debug("MergeCheck: cannot merge, approval is pending.")
2243 2244
2244 2245 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2245 2246 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2246 2247 if fail_early:
2247 2248 return merge_check
2248 2249
2249 2250 # permission to merge into the target branch
2250 2251 target_commit_id = pull_request.target_ref_parts.commit_id
2251 2252 if pull_request.target_ref_parts.type == 'branch':
2252 2253 branch_name = pull_request.target_ref_parts.name
2253 2254 else:
2254 2255 # for mercurial we can always figure out the branch from the commit
2255 2256 # in case of bookmark
2256 2257 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2257 2258 branch_name = target_commit.branch
2258 2259
2259 2260 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2260 2261 pull_request.target_repo.repo_name, branch_name)
2261 2262 if branch_perm and branch_perm == 'branch.none':
2262 2263 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2263 2264 branch_name, rule)
2264 2265 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2265 2266 if fail_early:
2266 2267 return merge_check
2267 2268
2268 2269 # review status, must be always present
2269 2270 review_status = pull_request.calculated_review_status()
2270 2271 merge_check.review_status = review_status
2271 2272 merge_check.reviewers_count = pull_request.reviewers_count
2272 2273 merge_check.observers_count = pull_request.observers_count
2273 2274
2274 2275 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2275 2276 if not status_approved and merge_check.reviewers_count:
2276 2277 log.debug("MergeCheck: cannot merge, approval is pending.")
2277 2278 msg = _('Pull request reviewer approval is pending.')
2278 2279
2279 2280 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2280 2281
2281 2282 if fail_early:
2282 2283 return merge_check
2283 2284
2284 2285 # left over TODOs
2285 2286 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2286 2287 if todos:
2287 2288 log.debug("MergeCheck: cannot merge, {} "
2288 2289 "unresolved TODOs left.".format(len(todos)))
2289 2290
2290 2291 if len(todos) == 1:
2291 2292 msg = _('Cannot merge, {} TODO still not resolved.').format(
2292 2293 len(todos))
2293 2294 else:
2294 2295 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2295 2296 len(todos))
2296 2297
2297 2298 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2298 2299
2299 2300 if fail_early:
2300 2301 return merge_check
2301 2302
2302 2303 # merge possible, here is the filesystem simulation + shadow repo
2303 2304 merge_response, merge_status, msg = PullRequestModel().merge_status(
2304 2305 pull_request, translator=translator,
2305 2306 force_shadow_repo_refresh=force_shadow_repo_refresh)
2306 2307
2307 2308 merge_check.merge_possible = merge_status
2308 2309 merge_check.merge_msg = msg
2309 2310 merge_check.merge_response = merge_response
2310 2311
2311 2312 source_ref_id = pull_request.source_ref_parts.commit_id
2312 2313 target_ref_id = pull_request.target_ref_parts.commit_id
2313 2314
2314 2315 try:
2315 2316 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2316 2317 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2317 2318 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2318 2319 merge_check.source_commit.current_raw_id = source_commit.raw_id
2319 2320 merge_check.source_commit.previous_raw_id = source_ref_id
2320 2321
2321 2322 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2322 2323 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2323 2324 merge_check.target_commit.current_raw_id = target_commit.raw_id
2324 2325 merge_check.target_commit.previous_raw_id = target_ref_id
2325 2326 except (SourceRefMissing, TargetRefMissing):
2326 2327 pass
2327 2328
2328 2329 if not merge_status:
2329 2330 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2330 2331 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2331 2332
2332 2333 if fail_early:
2333 2334 return merge_check
2334 2335
2335 2336 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2336 2337 return merge_check
2337 2338
2338 2339 @classmethod
2339 2340 def get_merge_conditions(cls, pull_request, translator):
2340 2341 _ = translator
2341 2342 merge_details = {}
2342 2343
2343 2344 model = PullRequestModel()
2344 2345 use_rebase = model._use_rebase_for_merging(pull_request)
2345 2346
2346 2347 if use_rebase:
2347 2348 merge_details['merge_strategy'] = dict(
2348 2349 details={},
2349 2350 message=_('Merge strategy: rebase')
2350 2351 )
2351 2352 else:
2352 2353 merge_details['merge_strategy'] = dict(
2353 2354 details={},
2354 2355 message=_('Merge strategy: explicit merge commit')
2355 2356 )
2356 2357
2357 2358 close_branch = model._close_branch_before_merging(pull_request)
2358 2359 if close_branch:
2359 2360 repo_type = pull_request.target_repo.repo_type
2360 2361 close_msg = ''
2361 2362 if repo_type == 'hg':
2362 2363 close_msg = _('Source branch will be closed before the merge.')
2363 2364 elif repo_type == 'git':
2364 2365 close_msg = _('Source branch will be deleted after the merge.')
2365 2366
2366 2367 merge_details['close_branch'] = dict(
2367 2368 details={},
2368 2369 message=close_msg
2369 2370 )
2370 2371
2371 2372 return merge_details
2372 2373
2373 2374
# Summary of commit-set differences between two pull-request versions:
# added/common/removed groups plus an overall total.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# Summary of per-file differences: added/modified/removed groups.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
General Comments 0
You need to be logged in to leave comments. Login now