# -*- coding: utf-8 -*-

# Copyright (C) 2012-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
"""
pull request model for RhodeCode
"""
from collections import namedtuple
import json
import logging
import datetime
import urllib

from pylons.i18n.translation import _
from pylons.i18n.translation import lazy_ugettext
from pyramid.threadlocal import get_current_request
from sqlalchemy import or_

from rhodecode import events
from rhodecode.lib import helpers as h, hooks_utils, diffs
from rhodecode.lib import audit_logger
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.hooks_daemon import prepare_callback_daemon
from rhodecode.lib.markup_renderer import (
DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
from rhodecode.lib.vcs.backends.base import (
    Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
from rhodecode.lib.vcs.conf import settings as vcs_settings
from rhodecode.lib.vcs.exceptions import (
CommitDoesNotExistError, EmptyRepositoryError)
from rhodecode.model import BaseModel
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    PullRequest, PullRequestReviewers, ChangesetStatus,
    PullRequestVersion, ChangesetComment, Repository)
from rhodecode.model.meta import Session
from rhodecode.model.notification import NotificationModel, \
EmailNotificationModel
from rhodecode.model.scm import ScmModel
from rhodecode.model.settings import VcsSettingsModel
log = logging.getLogger(__name__)

# Data structure to hold the response data when updating commits during a pull
# request update.
UpdateResponse = namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
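# Illustrative sketch of the fields above (hypothetical values, not produced
# by any code in this module): a successful update that changed the source
# side might be reported roughly as
#   UpdateResponse(executed=True, reason=UpdateFailureReason.NONE,
#                  new=<PullRequestVersion>, old=<PullRequest>,
#                  changes=<ChangeTuple>, source_changed=True,
#                  target_changed=False)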


class PullRequestModel(BaseModel):
cls = PullRequest
DIFF_CONTEXT = 3
MERGE_STATUS_MESSAGES = {
MergeFailureReason.NONE: lazy_ugettext(
'This pull request can be automatically merged.'),
MergeFailureReason.UNKNOWN: lazy_ugettext(
'This pull request cannot be merged because of an unhandled'
' exception.'),
MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
'This pull request could not be merged because push to target'
' failed.'),
MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
'This pull request cannot be merged because the target is not a'
' head.'),
MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
'This pull request cannot be merged because the source contains'
' more branches than the target.'),
MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
'This pull request cannot be merged because the target has'
' multiple heads.'),
MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
'This pull request cannot be merged because the target repository'
' is locked.'),
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
'This pull request cannot be merged because the target '
'reference is missing.'),
MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
'This pull request cannot be merged because the source '
'reference is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),
    }

    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
'This pull request cannot be updated because the target '
'reference is missing.'),
UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
'This pull request cannot be updated because the source '
'reference is missing.'),
}

    def __get_pull_request(self, pull_request):
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
def _check_perms(self, perms, pull_request, user, api=False):
if not api:
return h.HasRepoPermissionAny(*perms)(
user=user, repo_name=pull_request.target_repo.repo_name)
else:
return h.HasRepoPermissionAnyApi(*perms)(
user=user, repo_name=pull_request.target_repo.repo_name)
def check_user_read(self, pull_request, user, api=False):
_perms = ('repository.admin', 'repository.write', 'repository.read',)
return self._check_perms(_perms, pull_request, user, api)
def check_user_merge(self, pull_request, user, api=False):
_perms = ('repository.admin', 'repository.write', 'hg.admin',)
return self._check_perms(_perms, pull_request, user, api)
def check_user_update(self, pull_request, user, api=False):
owner = user.user_id == pull_request.user_id
return self.check_user_merge(pull_request, user, api) or owner
    def check_user_delete(self, pull_request, user):
        owner = user.user_id == pull_request.user_id
        _perms = ('repository.admin',)
        return self._check_perms(_perms, pull_request, user) or owner
    def check_user_change_status(self, pull_request, user, api=False):
reviewer = user.user_id in [x.user_id for x in
pull_request.reviewers]
return self.check_user_update(pull_request, user, api) or reviewer
def get(self, pull_request):
return self.__get_pull_request(pull_request)
def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
opened_by=None, order_by=None,
order_dir='desc'):
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()
        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)
# closed,opened
if statuses:
q = q.filter(PullRequest.status.in_(statuses))
# opened by filter
if opened_by:
q = q.filter(PullRequest.user_id.in_(opened_by))
if order_by:
order_map = {
'name_raw': PullRequest.pull_request_id,
'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
if order_dir == 'asc':
q = q.order_by(order_map[order_by].asc())
else:
q = q.order_by(order_map[order_by].desc())
return q
def count_all(self, repo_name, source=False, statuses=None,
opened_by=None):
"""
Count the number of pull requests for a specific repository.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:returns: int number of pull requests
"""
q = self._prepare_get_all_query(
repo_name, source=source, statuses=statuses, opened_by=opened_by)
return q.count()
def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
offset=0, length=None, order_by=None, order_dir='desc'):
"""
Get all pull requests for a specific repository.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:param offset: pagination offset
:param length: length of returned list
:param order_by: order of the returned list
:param order_dir: 'asc' or 'desc' ordering direction
:returns: list of pull requests
"""
q = self._prepare_get_all_query(
repo_name, source=source, statuses=statuses, opened_by=opened_by,
order_by=order_by, order_dir=order_dir)
if length:
pull_requests = q.limit(length).offset(offset).all()
else:
pull_requests = q.all()
return pull_requests
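    # Usage sketch (illustrative only; the repo name and paging values are
    # made-up, and the status constant is assumed to exist on the PullRequest
    # db model):
    #   >>> model = PullRequestModel()
    #   >>> model.count_all('some/repo', statuses=[PullRequest.STATUS_NEW])
    #   >>> model.get_all('some/repo', statuses=[PullRequest.STATUS_NEW],
    #   ...               offset=0, length=20, order_by='updated_on_raw')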
def count_awaiting_review(self, repo_name, source=False, statuses=None,
opened_by=None):
"""
Count the number of pull requests for a specific repository that are
awaiting review.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:returns: int number of pull requests
"""
pull_requests = self.get_awaiting_review(
repo_name, source=source, statuses=statuses, opened_by=opened_by)
return len(pull_requests)
def get_awaiting_review(self, repo_name, source=False, statuses=None,
opened_by=None, offset=0, length=None,
order_by=None, order_dir='desc'):
"""
Get all pull requests for a specific repository that are awaiting
review.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:param offset: pagination offset
:param length: length of returned list
:param order_by: order of the returned list
:param order_dir: 'asc' or 'desc' ordering direction
:returns: list of pull requests
"""
pull_requests = self.get_all(
repo_name, source=source, statuses=statuses, opened_by=opened_by,
order_by=order_by, order_dir=order_dir)
_filtered_pull_requests = []
for pr in pull_requests:
status = pr.calculated_review_status()
if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
ChangesetStatus.STATUS_UNDER_REVIEW]:
_filtered_pull_requests.append(pr)
if length:
return _filtered_pull_requests[offset:offset+length]
else:
return _filtered_pull_requests
def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
opened_by=None, user_id=None):
"""
Count the number of pull requests for a specific repository that are
awaiting review from a specific user.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:param user_id: reviewer user of the pull request
:returns: int number of pull requests
"""
pull_requests = self.get_awaiting_my_review(
repo_name, source=source, statuses=statuses, opened_by=opened_by,
user_id=user_id)
return len(pull_requests)
def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
opened_by=None, user_id=None, offset=0,
length=None, order_by=None, order_dir='desc'):
"""
Get all pull requests for a specific repository that are awaiting
review from a specific user.
:param repo_name: target or source repo
:param source: boolean flag to specify if repo_name refers to source
:param statuses: list of pull request statuses
:param opened_by: author user of the pull request
:param user_id: reviewer user of the pull request
:param offset: pagination offset
:param length: length of returned list
:param order_by: order of the returned list
:param order_dir: 'asc' or 'desc' ordering direction
:returns: list of pull requests
"""
pull_requests = self.get_all(
repo_name, source=source, statuses=statuses, opened_by=opened_by,
order_by=order_by, order_dir=order_dir)
_my = PullRequestModel().get_not_reviewed(user_id)
my_participation = []
for pr in pull_requests:
if pr in _my:
my_participation.append(pr)
_filtered_pull_requests = my_participation
if length:
return _filtered_pull_requests[offset:offset+length]
else:
return _filtered_pull_requests
def get_not_reviewed(self, user_id):
return [
x.pull_request for x in PullRequestReviewers.query().filter(
PullRequestReviewers.user_id == user_id).all()
]
    def _prepare_participating_query(self, user_id=None, statuses=None,
                                     order_by=None, order_dir='desc'):
q = PullRequest.query()
if user_id:
reviewers_subquery = Session().query(
PullRequestReviewers.pull_request_id).filter(
PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
PullRequest.user_id == user_id,
PullRequest.pull_request_id.in_(reviewers_subquery)
)
q = PullRequest.query().filter(user_filter)
# closed,opened
if statuses:
q = q.filter(PullRequest.status.in_(statuses))
if order_by:
order_map = {
'name_raw': PullRequest.pull_request_id,
'title': PullRequest.title,
'updated_on_raw': PullRequest.updated_on,
'target_repo': PullRequest.target_repo_id
}
if order_dir == 'asc':
q = q.order_by(order_map[order_by].asc())
else:
q = q.order_by(order_map[order_by].desc())
return q
def count_im_participating_in(self, user_id=None, statuses=None):
q = self._prepare_participating_query(user_id, statuses=statuses)
return q.count()
def get_im_participating_in(
self, user_id=None, statuses=None, offset=0,
length=None, order_by=None, order_dir='desc'):
"""
        Get all pull requests that I'm participating in, or that I have opened
"""
q = self._prepare_participating_query(
user_id, statuses=statuses, order_by=order_by,
order_dir=order_dir)
if length:
pull_requests = q.limit(length).offset(offset).all()
else:
pull_requests = q.all()
return pull_requests
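    # Usage sketch (illustrative; the user id is a made-up value and the
    # status constant is assumed from the PullRequest db model):
    #   >>> prs = PullRequestModel().get_im_participating_in(
    #   ...     user_id=42, statuses=[PullRequest.STATUS_NEW], length=10)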
    def get_versions(self, pull_request):
"""
        returns versions of the pull request sorted by ID ascending
"""
return PullRequestVersion.query()\
.filter(PullRequestVersion.pull_request == pull_request)\
.order_by(PullRequestVersion.pull_request_version_id.asc())\
.all()
def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               reviewer_data=None):
        created_by_user = self._get_user(created_by)
source_repo = self._get_repo(source_repo)
target_repo = self._get_repo(target_repo)
pull_request = PullRequest()
pull_request.source_repo = source_repo
pull_request.source_ref = source_ref
pull_request.target_repo = target_repo
pull_request.target_ref = target_ref
pull_request.revisions = revisions
pull_request.title = title
pull_request.description = description
pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data

Session().add(pull_request)
Session().flush()
        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
if user.user_id in reviewer_ids:
continue
            reviewer_ids.add(user.user_id)
            reviewer = PullRequestReviewers()
reviewer.user = user
reviewer.pull_request = pull_request
reviewer.reasons = reasons
reviewer.mandatory = mandatory
            Session().add(reviewer)
# Set approval status to "Under Review" for all commits which are
# part of this pull request.
ChangesetStatusModel().set_status(
repo=target_repo,
status=ChangesetStatus.STATUS_UNDER_REVIEW,
user=created_by_user,
pull_request=pull_request
)
        self.notify_reviewers(pull_request, reviewer_ids)
        self._trigger_pull_request_hook(
pull_request, created_by_user, 'create')
        creation_data = pull_request.get_api_data(with_merge_state=False)
self._log_audit_action(
'repo.pull_request.create', {'data': creation_data},
created_by_user, pull_request)
        return pull_request
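    # Sketch of the `reviewers` structure expected by create() (illustrative
    # values): each entry unpacks as (user_id, reasons, mandatory), e.g.
    #   [(2, ['Default reviewer'], False), (4, [], True)]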
def _trigger_pull_request_hook(self, pull_request, user, action):
pull_request = self.__get_pull_request(pull_request)
target_scm = pull_request.target_repo.scm_instance()
if action == 'create':
trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
elif action == 'merge':
trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
elif action == 'close':
trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
elif action == 'review_status_change':
trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
elif action == 'update':
trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
else:
return
trigger_hook(
username=user.username,
repo_name=pull_request.target_repo.repo_name,
repo_alias=target_scm.alias,
pull_request=pull_request)
def _get_commit_ids(self, pull_request):
"""
Return the commit ids of the merged pull request.
This method is not dealing correctly yet with the lack of autoupdates
nor with the implicit target updates.
For example: if a commit in the source repo is already in the target it
will be reported anyways.
"""
merge_rev = pull_request.merge_rev
if merge_rev is None:
raise ValueError('This pull request was not merged yet')
commit_ids = list(pull_request.revisions)
if merge_rev not in commit_ids:
commit_ids.append(merge_rev)
return commit_ids
def merge(self, pull_request, user, extras):
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        merge_state = self._merge_pull_request(pull_request, user, extras)
if merge_state.executed:
            log.debug(
                "Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

self._log_audit_action(
'repo.pull_request.merge',
{'merge_state': merge_state.__dict__},
user, pull_request)
        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
def _merge_pull_request(self, pull_request, user, extras):
target_vcs = pull_request.target_repo.scm_instance()
source_vcs = pull_request.source_repo.scm_instance()
target_ref = self._refresh_reference(
pull_request.target_ref_parts, target_vcs)
message = _(
'Merge pull request #%(pr_id)s from '
'%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
'pr_id': pull_request.pull_request_id,
'source_repo': source_vcs.name,
'source_ref_name': pull_request.source_ref_parts.name,
'pr_title': pull_request.title
}
workspace_id = self._workspace_id(pull_request)
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

with callback_daemon:
# TODO: johbo: Implement a clean way to run a config_override
# for a single call.
target_vcs.config.set(
'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
merge_state = target_vcs.merge(
target_ref, source_vcs, pull_request.source_ref_parts,
workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase,
                close_branch=close_branch)

        return merge_state
def _comment_and_close_pr(self, pull_request, user, merge_state):
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        CommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
repo=pull_request.target_repo.repo_id,
user=user.user_id,
pull_request=pull_request.pull_request_id,
f_path=None,
line_no=None,
closing_pr=True
)
Session().add(pull_request)
Session().flush()
# TODO: paris: replace invalidation with less radical solution
ScmModel().mark_for_invalidation(
pull_request.target_repo.repo_name)
self._trigger_pull_request_hook(pull_request, user, 'merge')
def has_valid_update_type(self, pull_request):
source_ref_type = pull_request.source_ref_parts.type
return source_ref_type in ['book', 'branch', 'tag']
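    # Note: source/target refs are stored as 'type:name:commit_id' strings
    # (see the '%s:%s:%s' formatting in update_commits below); a branch ref
    # could look like 'branch:default:cafebabe0123' (hypothetical hash).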
def update_commits(self, pull_request):
"""
Get the updated list of commits for the pull request
and return the new pull request version and the list
of commits processed by this update action
"""
pull_request = self.__get_pull_request(pull_request)
source_ref_type = pull_request.source_ref_parts.type
source_ref_name = pull_request.source_ref_parts.name
source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
log.debug(
"Skipping update of pull request %s due to ref type: %s",
pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        # source repo
        source_repo = pull_request.source_repo.scm_instance()
        try:
source_commit = source_repo.get_commit(commit_id=source_ref_name)
except CommitDoesNotExistError:
return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        source_changed = source_ref_id != source_commit.raw_id

# target repo
target_repo = pull_request.target_repo.scm_instance()
try:
target_commit = target_repo.get_commit(commit_id=target_ref_name)
except CommitDoesNotExistError:
return UpdateResponse(
executed=False,
reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        target_changed = target_ref_id != target_commit.raw_id

if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None,
                source_changed=source_changed, target_changed=target_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
log.debug('Updating pull request because of change in %s detected',
change_in_found)

        # Finally there is a need for an update; in case of a source change
        # we create a new version, otherwise we just update in place.
if source_changed:
pull_request_version = self._create_version_from_snapshot(pull_request)
self._link_comments_to_version(pull_request_version)
else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        try:
if target_ref_type in ('tag', 'branch', 'book'):
target_commit = target_repo.get_commit(target_ref_name)
else:
target_commit = target_repo.get_commit(target_ref_id)
except CommitDoesNotExistError:
return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
source_changed=source_changed, target_changed=target_changed)

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "branch", "date", "message"]
commit_ranges = target_repo.compare(
target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
pre_load=pre_load)
ancestor = target_repo.get_common_ancestor(
target_commit.raw_id, source_commit.raw_id, source_repo)
pull_request.source_ref = '%s:%s:%s' % (
source_ref_type, source_ref_name, source_commit.raw_id)
pull_request.target_ref = '%s:%s:%s' % (
target_ref_type, target_ref_name, ancestor)

        pull_request.revisions = [
commit.raw_id for commit in reversed(commit_ranges)]
pull_request.updated_on = datetime.datetime.now()
Session().add(pull_request)
        new_commit_ids = pull_request.revisions

old_diff_data, new_diff_data = self._generate_update_diffs(
pull_request, pull_request_version)
        # calculate commit and file changes
changes = self._calculate_commit_id_changes(
old_commit_ids, new_commit_ids)
file_changes = self._calculate_file_changes(
old_diff_data, new_diff_data)
# set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
new_diff_data=new_diff_data)
        commit_changes = (changes.added or changes.removed)
file_node_changes = (
file_changes.added or file_changes.modified or file_changes.removed)
pr_has_changes = commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
if pr_has_changes:
update_comment = CommentsModel().create(
text=self._render_update_message(changes, file_changes),
repo=pull_request.target_repo,
                user=pull_request.author,
pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
# Update status to "Under Review" for added commits
for commit_id in changes.added:
ChangesetStatusModel().set_status(
repo=pull_request.source_repo,
status=ChangesetStatus.STATUS_UNDER_REVIEW,
comment=update_comment,
user=pull_request.author,
pull_request=pull_request,
revision=commit_id)
log.debug(
'Updated pull request %s, added_ids: %s, common_ids: %s, '
'removed_ids: %s', pull_request.pull_request_id,
changes.added, changes.common, changes.removed)
        log.debug(
'Updated pull request with the following file changes: %s',
file_changes)
log.info(
"Updated pull request %s from commit %s to commit %s, "
"stored new version %s of this pull request.",
pull_request.pull_request_id, source_ref_id,
pull_request.source_ref_parts.commit_id,
pull_request_version.pull_request_version_id)
Session().commit()
        self._trigger_pull_request_hook(
pull_request, pull_request.author, 'update')
        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes,
source_changed=source_changed, target_changed=target_changed)
def _create_version_from_snapshot(self, pull_request):
version = PullRequestVersion()
version.title = pull_request.title
version.description = pull_request.description
version.status = pull_request.status
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
version.user_id = pull_request.user_id
version.source_repo = pull_request.source_repo
version.source_ref = pull_request.source_ref
version.target_repo = pull_request.target_repo
version.target_ref = pull_request.target_ref
version._last_merge_source_rev = pull_request._last_merge_source_rev
version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data
version.revisions = pull_request.revisions
version.pull_request = pull_request
Session().add(version)
Session().flush()
return version
def _generate_update_diffs(self, pull_request, pull_request_version):

        diff_context = (
self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())

source_repo = pull_request_version.source_repo
source_ref_id = pull_request_version.source_ref_parts.commit_id
target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

source_repo = pull_request.source_repo
source_ref_id = pull_request.source_ref_parts.commit_id
target_ref_id = pull_request.target_ref_parts.commit_id
        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)
old_diff_data = diffs.DiffProcessor(old_diff)
old_diff_data.prepare()
new_diff_data = diffs.DiffProcessor(new_diff)
new_diff_data.prepare()
return old_diff_data, new_diff_data
def _link_comments_to_version(self, pull_request_version):
"""
Link all unlinked comments of this pull request to the given version.
:param pull_request_version: The `PullRequestVersion` to which
the comments shall be linked.
"""
pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
.filter(
# TODO: johbo: Should we query for the repo at all here?
# Pending decision on how comments of PRs are to be related
# to either the source repo, the target repo or no repo at all.
ChangesetComment.repo_id == pull_request.target_repo.repo_id,
ChangesetComment.pull_request == pull_request,
ChangesetComment.pull_request_version == None)\
.order_by(ChangesetComment.comment_id.asc())
# TODO: johbo: Find out why this breaks if it is done in a bulk
# operation.
for comment in comments:
comment.pull_request_version_id = (
pull_request_version.pull_request_version_id)
Session().add(comment)
def _calculate_commit_id_changes(self, old_ids, new_ids):
        added = [x for x in new_ids if x not in old_ids]
common = [x for x in new_ids if x in old_ids]
removed = [x for x in old_ids if x not in new_ids]
total = new_ids
return ChangeTuple(added, common, removed, total)
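    # Worked example (hypothetical ids): old_ids=['a', 'b'], new_ids=['b', 'c']
    # yields ChangeTuple(added=['c'], common=['b'], removed=['a'],
    # total=['b', 'c']).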
def _calculate_file_changes(self, old_diff_data, new_diff_data):
old_files = OrderedDict()
for diff_data in old_diff_data.parsed_diff:
old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
added_files = []
modified_files = []
removed_files = []
for diff_data in new_diff_data.parsed_diff:
new_filename = diff_data['filename']
new_hash = md5_safe(diff_data['raw_diff'])
old_hash = old_files.get(new_filename)
if not old_hash:
# file is not present in old diff, means it's added
added_files.append(new_filename)
else:
if new_hash != old_hash:
modified_files.append(new_filename)
# now remove a file from old, since we have seen it already
del old_files[new_filename]
# removed files is when there are present in old, but not in NEW,
# since we remove old files that are present in new diff, left-overs
# if any should be the removed files
removed_files.extend(old_files.keys())
return FileChangeTuple(added_files, modified_files, removed_files)
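    # Worked example (hypothetical file names): if 'a.py' appears only in the
    # old diff, 'b.py' appears in both with different raw-diff hashes, and
    # 'c.py' appears only in the new diff, the result is
    # FileChangeTuple(added=['c.py'], modified=['b.py'], removed=['a.py']).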
def _render_update_message(self, changes, file_changes):
"""
render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it always looks the same regardless of which default renderer
        the system is using.
:param changes: changes named tuple
:param file_changes: file changes named tuple
"""
new_status = ChangesetStatus.get_status_lbl(
ChangesetStatus.STATUS_UNDER_REVIEW)
changed_files = (
file_changes.added + file_changes.modified + file_changes.removed)
params = {
'under_review_label': new_status,
'added_commits': changes.added,
'removed_commits': changes.removed,
'changed_files': changed_files,
'added_files': file_changes.added,
'modified_files': file_changes.modified,
'removed_files': file_changes.removed,
}
renderer = RstTemplateRenderer()
return renderer.render('pull_request_update.mako', **params)
    def edit(self, pull_request, title, description, user):
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
raise ValueError('This pull request is closed')
if title:
pull_request.title = title
pull_request.description = description
pull_request.updated_on = datetime.datetime.now()
Session().add(pull_request)
        self._log_audit_action(
'repo.pull_request.edit', {'old_data': old_data},
user, pull_request)

    def update_reviewers(self, pull_request, reviewer_data, user):
        """
Update the reviewers in the pull request
:param pull_request: the pr to update
        :param reviewer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag)]
        """
        reviewers = {}
for user_id, reasons, mandatory in reviewer_data:
            if isinstance(user_id, (int, basestring)):
user_id = self._get_user(user_id).user_id
            reviewers[user_id] = {
'reasons': reasons, 'mandatory': mandatory}

        reviewers_ids = set(reviewers.keys())
        pull_request = self.__get_pull_request(pull_request)
current_reviewers = PullRequestReviewers.query()\
.filter(PullRequestReviewers.pull_request ==
pull_request).all()
current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
ids_to_add = reviewers_ids.difference(current_reviewers_ids)
ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
log.debug("Adding %s reviewers", ids_to_add)
log.debug("Removing %s reviewers", ids_to_remove)
changed = False
for uid in ids_to_add:
changed = True
_usr = self._get_user(uid)
            reviewer = PullRequestReviewers()
reviewer.user = _usr
reviewer.pull_request = pull_request
reviewer.reasons = reviewers[uid]['reasons']
# NOTE(marcink): mandatory shouldn't be changed now
            # reviewer.mandatory = reviewers[uid]['reasons']
            Session().add(reviewer)
            self._log_audit_action(
'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
user, pull_request)
for uid in ids_to_remove:
changed = True
            reviewers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()
# use .all() in case we accidentally added the same person twice
# this CAN happen due to the lack of DB checks
for obj in reviewers:
                old_data = obj.get_dict()
                Session().delete(obj)
                self._log_audit_action(
'repo.pull_request.reviewer.delete',
{'old_data': old_data}, user, pull_request)

        if changed:
pull_request.updated_on = datetime.datetime.now()
Session().add(pull_request)
        self.notify_reviewers(pull_request, ids_to_add)
        return ids_to_add, ids_to_remove
    def get_url(self, pull_request, request=None, permalink=False):
if not request:
request = get_current_request()
if permalink:
return request.route_url(
'pull_requests_global',
pull_request_id=pull_request.pull_request_id,)
else:
            return request.route_url('pullrequest_show',
                repo_name=safe_str(pull_request.target_repo.repo_name),
                pull_request_id=pull_request.pull_request_id,)

    def get_shadow_clone_url(self, pull_request):
        """
Returns qualified url pointing to the shadow repository. If this pull
request is closed there is no shadow repository and ``None`` will be
returned.
"""
if pull_request.is_closed():
return None
else:
            pr_url = urllib.unquote(self.get_url(pull_request))
return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
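    # Illustrative only (hypothetical URL): if the pull request page is
    # https://code.example.com/myrepo/pull-request/7, the shadow clone URL
    # becomes https://code.example.com/myrepo/pull-request/7/repository.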

    def notify_reviewers(self, pull_request, reviewers_ids):
# notification to reviewers
if not reviewers_ids:
return
pull_request_obj = pull_request
# get the current participants of this pull request
recipients = reviewers_ids
notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
pr_source_repo = pull_request_obj.source_repo
pr_target_repo = pull_request_obj.target_repo
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

# pull request specifics
pull_request_commits = [
(x.raw_id, x.message)
for x in map(pr_source_repo.get_commit, pull_request.revisions)]
kwargs = {
'user': pull_request.author,
'pull_request': pull_request_obj,
'pull_request_commits': pull_request_commits,
'pull_request_target_repo': pr_target_repo,
'pull_request_target_repo_url': pr_target_repo_url,
'pull_request_source_repo': pr_source_repo,
'pull_request_source_repo_url': pr_source_repo_url,
'pull_request_url': pr_url,
}
# pre-generate the subject for notification itself
(subject,
_h, _e, # we don't care about those
body_plaintext) = EmailNotificationModel().render_email(
notification_type, **kwargs)
# create notification objects, and emails
NotificationModel().create(
created_by=pull_request.author,
notification_subject=subject,
notification_body=body_plaintext,
notification_type=notification_type,
recipients=recipients,
email_kwargs=kwargs,
)
    def delete(self, pull_request, user):
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
'repo.pull_request.delete', {'old_data': old_data},
user, pull_request)
        Session().delete(pull_request)
def close_pull_request(self, pull_request, user):
pull_request = self.__get_pull_request(pull_request)
self._cleanup_merge_workspace(pull_request)
pull_request.status = PullRequest.STATUS_CLOSED
pull_request.updated_on = datetime.datetime.now()
Session().add(pull_request)
self._trigger_pull_request_hook(
pull_request, pull_request.author, 'close')
pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)

    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None):
pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
# approved only if we have voting consent
status = ChangesetStatus.STATUS_APPROVED
else:
status = ChangesetStatus.STATUS_REJECTED
status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
_('Closing with status change {transition_icon} {status}.')
).format(transition_icon='>', status=status_lbl)
text = message or default_message

        # create a comment, and link it to new status
comment = CommentsModel().create(
text=text,
            repo=repo.repo_id,
user=user.user_id,
pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True
)
        # calculate old status before we change it
old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
repo.repo_id,
status,
user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
)

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of the pull request again, and based on
        # that calculation trigger a status change. This can happen when a
        # non-reviewer admin closes a PR: their vote doesn't change the
        # status, while a reviewer closing it might change it.
calculated_status = pull_request.calculated_review_status()
if old_calculated_status != calculated_status:
self._trigger_pull_request_hook(
pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
pull_request.pull_request_id, user)
        return comment, status

    def merge_status(self, pull_request):
if not self._is_merge_enabled(pull_request):
return False, _('Server-side pull request merging is disabled.')
if pull_request.is_closed():
return False, _('This pull request is closed.')
merge_possible, msg = self._check_repo_requirements(
target=pull_request.target_repo, source=pull_request.source_repo)
if not merge_possible:
return merge_possible, msg
try:
resp = self._try_merge(pull_request)
            log.debug("Merge response: %s", resp)
            status = resp.possible, self.merge_status_message(
resp.failure_reason)
except NotImplementedError:
status = False, _('Pull request merging is not supported.')
return status
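    # Usage sketch (illustrative): merge_status() returns a (bool, message)
    # pair, e.g.
    #   >>> possible, msg = PullRequestModel().merge_status(pull_request)
    #   >>> possible, msg
    #   (False, u'This pull request is closed.')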
def _check_repo_requirements(self, target, source):
"""
Check if `target` and `source` have compatible requirements.
Currently this is just checking for largefiles.
"""
target_has_largefiles = self._has_largefiles(target)
source_has_largefiles = self._has_largefiles(source)
merge_possible = True
message = u''
if target_has_largefiles != source_has_largefiles:
merge_possible = False
if source_has_largefiles:
message = _(
'Target repository large files support is disabled.')
else:
message = _(
'Source repository large files support is disabled.')
return merge_possible, message
def _has_largefiles(self, repo):
largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
'extensions', 'largefiles')
return largefiles_ui and largefiles_ui[0].active
def _try_merge(self, pull_request):
"""
Try to merge the pull request and return the merge status.
"""
        log.debug(
"Trying out if the pull request %s can be merged.",
pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

# Refresh the target reference.
try:
target_ref = self._refresh_reference(
pull_request.target_ref_parts, target_vcs)
except CommitDoesNotExistError:
merge_state = MergeResponse(
False, False, None, MergeFailureReason.MISSING_TARGET_REF)
return merge_state
target_locked = pull_request.target_repo.locked
if target_locked and target_locked[0]:
pull_request: Add debug logging around merge status calculation...
r141 log.debug("The target repository is locked.")
project: added all source files and assets
r1 merge_state = MergeResponse(
False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
elif self._needs_merge_state_refresh(pull_request, target_ref):
pull_request: Add debug logging around merge status calculation...
r141 log.debug("Refreshing the merge status of the repository.")
project: added all source files and assets
r1 merge_state = self._refresh_merge_state(
pull_request, target_vcs, target_ref)
else:
possible = pull_request.\
db: use a wrapper on pull requests _last_merge_status to ensure this is always INT....
r1968 last_merge_status == MergeFailureReason.NONE
project: added all source files and assets
r1 merge_state = MergeResponse(
db: use a wrapper on pull requests _last_merge_status to ensure this is always INT....
r1968 possible, False, None, pull_request.last_merge_status)
Martin Bornhold
pr: Move log statement to allow early return and still log merge response.
r1070
project: added all source files and assets
r1 return merge_state

    def _refresh_reference(self, reference, vcs_repository):
        if reference.type in ('branch', 'book'):
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id
        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference

    def _needs_merge_state_refresh(self, pull_request, target_reference):
        return not(
            pull_request.revisions and
            pull_request.revisions[0] == pull_request._last_merge_source_rev and
            target_reference.commit_id == pull_request._last_merge_target_rev)

    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
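
    # The dry-run merge above is only re-run when _needs_merge_state_refresh()
    # detects that the source or target tip moved; otherwise _try_merge()
    # reuses the result cached on the pull request (_last_merge_source_rev,
    # _last_merge_target_rev, last_merge_status, shadow_merge_ref).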

    def _workspace_id(self, pull_request):
        workspace_id = 'pr-%s' % pull_request.pull_request_id
        return workspace_id

    def merge_status_message(self, status_code):
        """
        Return a human friendly error message for the given merge status code.
        """
        return self.MERGE_STATUS_MESSAGES[status_code]

    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
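
    # Sketch of the structure returned by generate_repo_data(); the concrete
    # values are made up for illustration:
    #
    #   {
    #       'user': {'user_id': 2, 'username': 'admin', 'firstname': '...',
    #                'lastname': '...', 'gravatar_link': '...'},
    #       'description': 'first line of the repository description',
    #       'refs': {'all_refs': [...],
    #                'selected_ref': 'branch:default:<commit_id>',
    #                'select2_refs': [{'text': 'Branches', 'children': [...]}]}
    #   }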

    def generate_pullrequest_title(self, source, source_ref, target):
        return u'{source}#{at_ref} to {target}'.format(
            source=source,
            at_ref=source_ref,
            target=target,
        )
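
    # For example (hypothetical repository and branch names), this yields
    # titles such as u'my-repo#feature-branch to upstream-repo'.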

    def _cleanup_merge_workspace(self, pull_request):
        # Merging related cleanup
        target_scm = pull_request.target_repo.scm_instance()
        workspace_id = 'pr-%s' % pull_request.pull_request_id

        try:
            target_scm.cleanup_merge_workspace(workspace_id)
        except NotImplementedError:
            pass

    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
            by default
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key
            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()

        return groups, selected
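
    # Sketch of the returned structure (values are illustrative): `groups` is
    # a list of (group_refs, group_name) tuples and `selected` is one ref key,
    # e.g.:
    #
    #   groups = [([('branch:default:<commit_id>', 'default')], u'Branches')]
    #   selected = 'branch:default:<commit_id>'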

    def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=context)

    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id, context):
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'target_ref:%s and source_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')

    def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

    def _close_branch_before_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)
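
    # These helpers read flags such as 'rhodecode_pr_merge_enabled' and the
    # per-backend 'rhodecode_{hg,git}_use_rebase_for_merging' /
    # 'rhodecode_{hg,git}_close_branch_before_merging' keys from the general
    # VCS settings of the target repository, falling back to False.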

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fall back to the CE
        package functions.
        """
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            from rhodecode.apps.repository.utils import \
                get_default_reviewers_data
            from rhodecode.apps.repository.utils import \
                validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
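
    # A minimal usage sketch; the two callables are simply unpacked by the
    # caller (their exact signatures live in the EE/CE utils modules):
    #
    #   get_default_reviewers_data, validate_default_reviewers = \
    #       PullRequestModel().get_reviewer_functions()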


class MergeCheck(object):
    """
    Perform merge checks and return a check object which stores information
    about merge errors and merge conditions.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, user, fail_early=False, translator=None):
        # if migrated to pyramid...
        # _ = lambda: translator or _  # use passed in translator if any

        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, user not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible
        merge_status, msg = PullRequestModel().merge_status(pull_request)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check
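
    # A minimal usage sketch of the merge checks; `pr` and `current_user` are
    # hypothetical names for an already-loaded pull request and user:
    #
    #   check = MergeCheck.validate(pr, current_user, fail_early=True)
    #   if check.failed:
    #       for error_type, message in check.errors:
    #           log.debug('%s: %s', error_type, message)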

    @classmethod
    def get_merge_conditions(cls, pull_request):
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details


ChangeTuple = namedtuple('ChangeTuple',
                         ['added', 'common', 'removed', 'total'])

FileChangeTuple = namedtuple('FileChangeTuple',
                             ['added', 'modified', 'removed'])
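
# Shape sketch for the two tuples above; the values are made up for
# illustration only:
#
#   ChangeTuple(added=['abc123'], common=[], removed=[], total=['abc123'])
#   FileChangeTuple(added=['new.py'], modified=['setup.py'], removed=[])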