##// END OF EJS Templates
pull-requests: optimize db transaction logic.
super-admin -
r4712:412d5d47 stable
parent child Browse files
Show More
@@ -1,298 +1,303 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23
24 24 from rhodecode.lib.jsonalchemy import JsonRaw
25 25 from rhodecode.model import meta
26 26 from rhodecode.model.db import User, UserLog, Repository
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31 # action as key, and expected action_data as value
# action as key, and expected action_data as value.
# NOTE: the value documents the expected shape of the ``action_data``
# payload stored with each audit entry; an empty string means no
# structured payload schema is defined for that action.
ACTIONS_V1 = {
    'user.login.success': {'user_agent': ''},
    'user.login.failure': {'user_agent': ''},
    'user.logout': {'user_agent': ''},
    'user.register': {},
    'user.password.reset_request': {},
    'user.push': {'user_agent': '', 'commit_ids': []},
    'user.pull': {'user_agent': ''},

    'user.create': {'data': {}},
    'user.delete': {'old_data': {}},
    'user.edit': {'old_data': {}},
    'user.edit.permissions': {},
    'user.edit.ip.add': {'ip': {}, 'user': {}},
    'user.edit.ip.delete': {'ip': {}, 'user': {}},
    'user.edit.token.add': {'token': {}, 'user': {}},
    'user.edit.token.delete': {'token': {}, 'user': {}},
    'user.edit.email.add': {'email': ''},
    'user.edit.email.delete': {'email': ''},
    'user.edit.ssh_key.add': {'token': {}, 'user': {}},
    'user.edit.ssh_key.delete': {'token': {}, 'user': {}},
    'user.edit.password_reset.enabled': {},
    'user.edit.password_reset.disabled': {},

    'user_group.create': {'data': {}},
    'user_group.delete': {'old_data': {}},
    'user_group.edit': {'old_data': {}},
    'user_group.edit.permissions': {},
    'user_group.edit.member.add': {'user': {}},
    'user_group.edit.member.delete': {'user': {}},

    'repo.create': {'data': {}},
    'repo.fork': {'data': {}},
    'repo.edit': {'old_data': {}},
    'repo.edit.permissions': {},
    'repo.edit.permissions.branch': {},
    'repo.archive': {'old_data': {}},
    'repo.delete': {'old_data': {}},

    'repo.archive.download': {'user_agent': '', 'archive_name': '',
                              'archive_spec': '', 'archive_cached': ''},

    'repo.permissions.branch_rule.create': {},
    'repo.permissions.branch_rule.edit': {},
    'repo.permissions.branch_rule.delete': {},

    'repo.pull_request.create': '',
    'repo.pull_request.edit': '',
    'repo.pull_request.delete': '',
    'repo.pull_request.close': '',
    'repo.pull_request.merge': '',
    'repo.pull_request.vote': '',
    'repo.pull_request.comment.create': '',
    'repo.pull_request.comment.edit': '',
    'repo.pull_request.comment.delete': '',

    'repo.pull_request.reviewer.add': '',
    'repo.pull_request.reviewer.delete': '',

    'repo.pull_request.observer.add': '',
    'repo.pull_request.observer.delete': '',

    'repo.commit.strip': {'commit_id': ''},
    'repo.commit.comment.create': {'data': {}},
    'repo.commit.comment.delete': {'data': {}},
    'repo.commit.comment.edit': {'data': {}},
    'repo.commit.vote': '',

    'repo.artifact.add': '',
    'repo.artifact.delete': '',

    'repo_group.create': {'data': {}},
    'repo_group.edit': {'old_data': {}},
    'repo_group.edit.permissions': {},
    'repo_group.delete': {'old_data': {}},
}

# Currently active audit-action schema version.
ACTIONS = ACTIONS_V1

# Markers stored in action_data['source'] to record where the action
# originated from: the web UI vs. the API.
SOURCE_WEB = 'source_web'
SOURCE_API = 'source_api'
113 113
114 114
class UserWrap(object):
    """
    Lightweight stand-in object that mimics the AuthUser interface,
    carrying just the attributes the audit logger reads.
    """

    def __init__(self, user_id=None, username=None, ip_addr=None):
        self.ip_addr = ip_addr
        self.username = username
        self.user_id = user_id
124 124
125 125
class RepoWrap(object):
    """
    Lightweight stand-in for a repository object, carrying just the
    attributes the audit logger reads.
    """

    def __init__(self, repo_id=None, repo_name=None):
        self.repo_name = repo_name
        self.repo_id = repo_id
134 134
135 135
def _store_log(action_name, action_data, user_id, username, user_data,
               ip_address, repository_id, repository_name):
    """
    Build (but do not persist) a version-2 ``UserLog`` audit entry from
    the already-normalized values. Falsy JSON payloads are replaced with
    an empty raw-JSON object so the columns are never NULL.
    """
    entry = UserLog()
    entry.version = UserLog.VERSION_2

    entry.action = action_name
    entry.action_data = action_data or JsonRaw(u'{}')

    entry.user_ip = ip_address
    entry.user_id = user_id
    entry.username = username
    entry.user_data = user_data or JsonRaw(u'{}')

    entry.repository_id = repository_id
    entry.repository_name = repository_name

    # timestamp of when the audit entry was created
    entry.action_date = datetime.datetime.now()

    return entry
156 156
157 157
def store_web(*args, **kwargs):
    """Store an audit entry tagged as originating from the web UI."""
    merged = dict(kwargs.pop('action_data', {}))
    # 'source' always wins over any caller-provided value
    merged['source'] = SOURCE_WEB
    kwargs['action_data'] = merged
    return store(*args, **kwargs)
166 166
167 167
def store_api(*args, **kwargs):
    """Store an audit entry tagged as originating from an API call."""
    merged = dict(kwargs.pop('action_data', {}))
    # 'source' always wins over any caller-provided value
    merged['source'] = SOURCE_API
    kwargs['action_data'] = merged
    return store(*args, **kwargs)
176 176
177 177
def store(action, user, action_data=None, user_data=None, ip_addr=None,
          repo=None, sa_session=None, commit=False):
    """
    Audit logger for various actions made by users, typically this
    results in a call such::

        from rhodecode.lib import audit_logger

        audit_logger.store(
            'repo.edit', user=self._rhodecode_user)
        audit_logger.store(
            'repo.delete', action_data={'data': repo_data},
            user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'))

        # repo action
        audit_logger.store(
            'repo.delete',
            user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'),
            repo=audit_logger.RepoWrap(repo_name='some-repo'))

        # repo action, when we know and have the repository object already
        audit_logger.store(
            'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, },
            user=self._rhodecode_user,
            repo=repo_object)

        # alternative wrapper to the above
        audit_logger.store_web(
            'repo.delete', action_data={},
            user=self._rhodecode_user,
            repo=repo_object)

        # without an user ?
        audit_logger.store(
            'user.login.failure',
            user=audit_logger.UserWrap(
                username=self.request.params.get('username'),
                ip_addr=self.request.remote_addr))

    :param action: one of the keys of ``ACTIONS``; anything else raises
    :param user: AuthUser / UserWrap-like object the action is attributed to
    :param action_data: optional structured payload for the action
    :param user_data: optional user snapshot; derived from AuthUser if absent
    :param ip_addr: explicit IP; falls back to ``user.ip_addr``
    :param repo: repo / RepoWrap-like object the action relates to
    :param sa_session: explicit SQLAlchemy session; defaults to meta.Session()
    :param commit: when True, commit the session inside this call
    :raises ValueError: if *action* is not a supported audit action
    """
    # local imports to avoid import cycles at module load time
    from rhodecode.lib.utils2 import safe_unicode
    from rhodecode.lib.auth import AuthUser

    action_spec = ACTIONS.get(action, None)
    if action_spec is None:
        raise ValueError('Action `{}` is not supported'.format(action))

    if not sa_session:
        sa_session = meta.Session()

    # everything below is best-effort: a failure to audit must never
    # break the calling operation, hence the broad except at the bottom
    try:
        username = getattr(user, 'username', None)
        if not username:
            pass

        user_id = getattr(user, 'user_id', None)
        if not user_id:
            # maybe we have username ? Try to figure user_id from username
            if username:
                user_id = getattr(
                    User.get_by_username(username), 'user_id', None)

        ip_addr = ip_addr or getattr(user, 'ip_addr', None)
        if not ip_addr:
            pass

        if not user_data:
            # try to get this from the auth user
            if isinstance(user, AuthUser):
                user_data = {
                    'username': user.username,
                    'email': user.email,
                }

        repository_name = getattr(repo, 'repo_name', None)
        repository_id = getattr(repo, 'repo_id', None)
        if not repository_id:
            # maybe we have repo_name ? Try to figure repo_id from repo_name
            if repository_name:
                repository_id = getattr(
                    Repository.get_by_repo_name(repository_name), 'repo_id', None)

        action_name = safe_unicode(action)
        ip_address = safe_unicode(ip_addr)

        # no_autoflush: we add the log entry and touch the user row without
        # triggering premature flushes of unrelated pending objects
        with sa_session.no_autoflush:

            user_log = _store_log(
                action_name=action_name,
                action_data=action_data or {},
                user_id=user_id,
                username=username,
                user_data=user_data or {},
                ip_address=ip_address,
                repository_id=repository_id,
                repository_name=repository_name
            )

            sa_session.add(user_log)
            # commit the audit entry first so entry_id is assigned before
            # we log it and before the last-activity update below
            if commit:
                sa_session.commit()
            entry_id = user_log.entry_id or ''

            update_user_last_activity(sa_session, user_id)

            # second commit persists the last-activity change separately,
            # so a failure there cannot roll back the audit entry
            if commit:
                sa_session.commit()

        log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s',
                 entry_id, action_name, user_id, username, ip_address)

    except Exception:
        # deliberate swallow: auditing is best-effort and must not break callers
        log.exception('AUDIT: failed to store audit log')
288 291
289 292
def update_user_last_activity(sa_session, user_id):
    """
    Bump the ``last_activity`` timestamp of the given user to now.

    Runs a bulk UPDATE (no ORM object load); on failure the error is
    logged and the session is rolled back so the caller's session stays usable.
    """
    _last_activity = datetime.datetime.now()
    try:
        sa_session.query(User).filter(User.user_id == user_id).update(
            {"last_activity": _last_activity})
        log.debug(
            'updated user `%s` last activity to:%s', user_id, _last_activity)
    except Exception:
        log.exception("Failed last activity update for user_id: %s", user_id)
        # leave the session clean for subsequent work by the caller
        sa_session.rollback()
303
@@ -1,2372 +1,2378 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
class UpdateResponse(object):
    """
    Value object describing the outcome of updating the commits of a
    pull request (whether it ran, why, and what changed on each side).
    """

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # note: 'commit_changes' is exposed as the 'changes' attribute
        self.changes = commit_changes
        self.common_ancestor_id = common_ancestor_id
        self.executed = executed
        self.new = new
        self.old = old
        self.reason = reason
        self.source_changed = source_changed
        self.target_changed = target_changed
86 86
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: source repository object
    :param source_ref: source commit id / reference
    :param target_repo: target repository object
    :param target_ref: target commit id / reference
    :param get_authors: also annotate changed lines to count original authors
    :param get_commit_authors: collect RhodeCode users matching incoming commit authors
    :returns: dict with 'commits', 'files', 'stats', 'ancestor',
        'original_authors' and 'commit_authors' keys
    :raises ValueError: when the two refs share no common ancestor
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    all_files = []
    all_files_changes = []
    changed_lines = {}
    # stats accumulates [lines added, lines deleted] across all files
    stats = [0, 0]
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        # collect old-side line numbers of deleted/modified lines, used
        # below for per-line author annotation
        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # cache of commit_id -> commit object to avoid repeated lookups
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # best-effort: a file missing at the ancestor is skipped
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
236 236
237 237
class PullRequestModel(BaseModel):

    # primary db class this model operates on
    cls = PullRequest

    # default number of context lines for generated diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # human-readable messages keyed by UpdateFailureReason, shown to users
    # after a pull-request update attempt
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # all reference types a pull request may point at
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # subset of REF_TYPES that can move, and thus support updates
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build (but do not execute) the base query used by get_all/count_all.

        :param repo_name: repo to filter on; matched as source repo when
            *source* is True, otherwise as target repo
        :param search_q: free-text filter over id, author username, title
            and description (case-insensitive substring match)
        :param statuses: restrict to these PR statuses
        :param opened_by: restrict to PRs authored by these user ids
        :param order_by: one of the keys in ``order_map`` below
        :param order_dir: 'asc' or 'desc'
        :param only_created: only PRs still in the 'created' state
        :returns: an un-executed SQLAlchemy query
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                # cast: allows substring match against the numeric PR id
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 409 """
410 410 Count the number of pull requests for a specific repository that are
411 411 awaiting review.
412 412
413 413 :param repo_name: target or source repo
414 414 :param search_q: filter by text
415 415 :param statuses: list of pull request statuses
416 416 :returns: int number of pull requests
417 417 """
418 418 pull_requests = self.get_awaiting_review(
419 419 repo_name, search_q=search_q, statuses=statuses)
420 420
421 421 return len(pull_requests)
422 422
423 423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
424 424 offset=0, length=None, order_by=None, order_dir='desc'):
425 425 """
426 426 Get all pull requests for a specific repository that are awaiting
427 427 review.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param statuses: list of pull request statuses
432 432 :param offset: pagination offset
433 433 :param length: length of returned list
434 434 :param order_by: order of the returned list
435 435 :param order_dir: 'asc' or 'desc' ordering direction
436 436 :returns: list of pull requests
437 437 """
438 438 pull_requests = self.get_all(
439 439 repo_name, search_q=search_q, statuses=statuses,
440 440 order_by=order_by, order_dir=order_dir)
441 441
442 442 _filtered_pull_requests = []
443 443 for pr in pull_requests:
444 444 status = pr.calculated_review_status()
445 445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
446 446 ChangesetStatus.STATUS_UNDER_REVIEW]:
447 447 _filtered_pull_requests.append(pr)
448 448 if length:
449 449 return _filtered_pull_requests[offset:offset+length]
450 450 else:
451 451 return _filtered_pull_requests
452 452
    def _prepare_awaiting_my_review_review_query(
            self, repo_name, user_id, search_q=None, statuses=None,
            order_by=None, order_dir='desc'):
        """
        Build (but do not execute) a query for PRs on *repo_name* where
        *user_id* is a reviewer and either has not voted yet, or whose
        latest vote is still a not-reviewed/under-review status.

        :param repo_name: name of the PR target repository
        :param user_id: the reviewing user
        :param search_q: free-text filter over id, author, title, description
        :param statuses: restrict to these PR statuses
        :param order_by: sort key (see ``order_map`` below)
        :param order_dir: 'asc' or 'desc'
        :returns: an un-executed SQLAlchemy query
        """

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)
        repo_alias = aliased(Repository)

        # correlated subquery: the most recent status version this reviewer
        # has recorded for the PR (versions count down, hence min())
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join so PRs where the reviewer never voted (status NULL)
        # are still included
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .join(repo_alias,
                  repo_alias.repo_id == pull_request_alias.target_repo_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(repo_alias.repo_name == repo_name) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                # cast: allows substring match against the numeric PR id
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        if order_by:
            order_map = {
                'name_raw': pull_request_alias.pull_request_id,
                'title': pull_request_alias.title,
                'updated_on_raw': pull_request_alias.updated_on,
                'target_repo': pull_request_alias.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
515 515
516 516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
517 517 """
518 518 Count the number of pull requests for a specific repository that are
519 519 awaiting review from a specific user.
520 520
521 521 :param repo_name: target or source repo
522 522 :param user_id: reviewer user of the pull request
523 523 :param search_q: filter by text
524 524 :param statuses: list of pull request statuses
525 525 :returns: int number of pull requests
526 526 """
527 527 q = self._prepare_awaiting_my_review_review_query(
528 528 repo_name, user_id, search_q=search_q, statuses=statuses)
529 529 return q.count()
530 530
531 531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
532 532 offset=0, length=None, order_by=None, order_dir='desc'):
533 533 """
534 534 Get all pull requests for a specific repository that are awaiting
535 535 review from a specific user.
536 536
537 537 :param repo_name: target or source repo
538 538 :param user_id: reviewer user of the pull request
539 539 :param search_q: filter by text
540 540 :param statuses: list of pull request statuses
541 541 :param offset: pagination offset
542 542 :param length: length of returned list
543 543 :param order_by: order of the returned list
544 544 :param order_dir: 'asc' or 'desc' ordering direction
545 545 :returns: list of pull requests
546 546 """
547 547
548 548 q = self._prepare_awaiting_my_review_review_query(
549 549 repo_name, user_id, search_q=search_q, statuses=statuses,
550 550 order_by=order_by, order_dir=order_dir)
551 551
552 552 if length:
553 553 pull_requests = q.limit(length).offset(offset).all()
554 554 else:
555 555 pull_requests = q.all()
556 556
557 557 return pull_requests
558 558
559 559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
560 560 order_by=None, order_dir='desc'):
561 561 """
562 562 return a query of pull-requests user is an creator, or he's added as a reviewer
563 563 """
564 564 q = PullRequest.query()
565 565 if user_id:
566 566 reviewers_subquery = Session().query(
567 567 PullRequestReviewers.pull_request_id).filter(
568 568 PullRequestReviewers.user_id == user_id).subquery()
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(reviewers_subquery)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_unicode(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 588 if order_by:
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 595 if order_dir == 'asc':
596 596 q = q.order_by(order_map[order_by].asc())
597 597 else:
598 598 q = q.order_by(order_map[order_by].desc())
599 599
600 600 return q
601 601
602 602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 604 return q.count()
605 605
606 606 def get_im_participating_in(
607 607 self, user_id=None, statuses=None, query='', offset=0,
608 608 length=None, order_by=None, order_dir='desc'):
609 609 """
610 610 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 611 """
612 612
613 613 q = self._prepare_im_participating_query(
614 614 user_id, statuses=statuses, query=query, order_by=order_by,
615 615 order_dir=order_dir)
616 616
617 617 if length:
618 618 pull_requests = q.limit(length).offset(offset).all()
619 619 else:
620 620 pull_requests = q.all()
621 621
622 622 return pull_requests
623 623
    def _prepare_participating_in_for_review_query(
            self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
        """
        Build a query of pull requests where `user_id` is a reviewer whose
        latest review status is still pending (under review / not reviewed),
        i.e. pull requests that need this user's action.

        :param user_id: reviewer user id
        :param statuses: optional list of pull request statuses to keep
        :param query: optional free-text filter (id, author username,
            title, description)
        :param order_by: one of 'name_raw', 'title', 'updated_on_raw',
            'target_repo'; no explicit ordering when falsy
        :param order_dir: 'asc' or 'desc'
        :returns: un-executed SQLAlchemy query
        """
        # review statuses that count as "still needs action"
        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)

        # correlated subquery picking the relevant status version per
        # (pull request, reviewer) pair
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        # outer join so reviewers with NO recorded status at all
        # (status_alias.* IS NULL) are also treated as pending
        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        if order_by:
            order_map = {
                'name_raw': pull_request_alias.pull_request_id,
                'title': pull_request_alias.title,
                'updated_on_raw': pull_request_alias.updated_on,
                'target_repo': pull_request_alias.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
681 681
682 682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 684 return q.count()
685 685
686 686 def get_im_participating_in_for_review(
687 687 self, user_id, statuses=None, query='', offset=0,
688 688 length=None, order_by=None, order_dir='desc'):
689 689 """
690 690 Get all Pull requests that needs user approval or rejection
691 691 """
692 692
693 693 q = self._prepare_participating_in_for_review_query(
694 694 user_id, statuses=statuses, query=query, order_by=order_by,
695 695 order_dir=order_dir)
696 696
697 697 if length:
698 698 pull_requests = q.limit(length).offset(offset).all()
699 699 else:
700 700 pull_requests = q.all()
701 701
702 702 return pull_requests
703 703
    def get_versions(self, pull_request):
        """
        Return the versions of the given pull request, sorted by version id
        ascending (oldest first). NOTE: the previous docstring claimed
        "descending", but the query orders by `.asc()`.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
712 712
713 713 def get_pr_version(self, pull_request_id, version=None):
714 714 at_version = None
715 715
716 716 if version and version == 'latest':
717 717 pull_request_ver = PullRequest.get(pull_request_id)
718 718 pull_request_obj = pull_request_ver
719 719 _org_pull_request_obj = pull_request_obj
720 720 at_version = 'latest'
721 721 elif version:
722 722 pull_request_ver = PullRequestVersion.get_or_404(version)
723 723 pull_request_obj = pull_request_ver
724 724 _org_pull_request_obj = pull_request_ver.pull_request
725 725 at_version = pull_request_ver.pull_request_version_id
726 726 else:
727 727 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 728 pull_request_id)
729 729
730 730 pull_request_display_obj = PullRequest.get_pr_display_object(
731 731 pull_request_obj, _org_pull_request_obj)
732 732
733 733 return _org_pull_request_obj, pull_request_obj, \
734 734 pull_request_display_obj, at_version
735 735
736 736 def pr_commits_versions(self, versions):
737 737 """
738 738 Maps the pull-request commits into all known PR versions. This way we can obtain
739 739 each pr version the commit was introduced in.
740 740 """
741 741 commit_versions = collections.defaultdict(list)
742 742 num_versions = [x.pull_request_version_id for x in versions]
743 743 for ver in versions:
744 744 for commit_id in ver.revisions:
745 745 ver_idx = ChangesetComment.get_index_from_version(
746 746 ver.pull_request_version_id, num_versions=num_versions)
747 747 commit_versions[commit_id].append(ver_idx)
748 748 return commit_versions
749 749
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request, attach its reviewer/observer rows, mark
        the commits as "Under Review", and run an initial merge simulation.

        :param created_by: user (object or id) creating the pull request
        :param source_repo: source repository (object, id or name)
        :param source_ref: full source ref, e.g. 'branch:name:commit_id'
        :param target_repo: target repository (object, id or name)
        :param target_ref: full target ref
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
        :param observers: iterable with the same element shape as `reviewers`
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed common ancestor
        :param description_renderer: renderer used for the description
        :param reviewer_data: rule data snapshot stored on the pull request
        :param translator: translation function; defaults to the current
            request's translator
        :param auth_user: acting AuthUser; defaults to the creator's
        :returns: the created `PullRequest` instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        # state stays CREATING until the merge simulation below finishes
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        # flush so pull_request_id exists for the reviewer/observer FK rows
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            # NOTE(review): observers are deliberately not added to
            # reviewer_ids here — presumably so duplicate observer entries
            # are allowed/ignored elsewhere; confirm before changing.
            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
891 891
892 892 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 893 pull_request = self.__get_pull_request(pull_request)
894 894 target_scm = pull_request.target_repo.scm_instance()
895 895 if action == 'create':
896 896 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 897 elif action == 'merge':
898 898 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 899 elif action == 'close':
900 900 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 901 elif action == 'review_status_change':
902 902 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 903 elif action == 'update':
904 904 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 905 elif action == 'comment':
906 906 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 907 elif action == 'comment_edit':
908 908 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 909 else:
910 910 return
911 911
912 912 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 913 pull_request, action, trigger_hook)
914 914 trigger_hook(
915 915 username=user.username,
916 916 repo_name=pull_request.target_repo.repo_name,
917 917 repo_type=target_scm.alias,
918 918 pull_request=pull_request,
919 919 data=data)
920 920
921 921 def _get_commit_ids(self, pull_request):
922 922 """
923 923 Return the commit ids of the merged pull request.
924 924
925 925 This method is not dealing correctly yet with the lack of autoupdates
926 926 nor with the implicit target updates.
927 927 For example: if a commit in the source repo is already in the target it
928 928 will be reported anyways.
929 929 """
930 930 merge_rev = pull_request.merge_rev
931 931 if merge_rev is None:
932 932 raise ValueError('This pull request was not merged yet')
933 933
934 934 commit_ids = list(pull_request.revisions)
935 935 if merge_rev not in commit_ids:
936 936 commit_ids.append(merge_rev)
937 937
938 938 return commit_ids
939 939
940 940 def merge_repo(self, pull_request, user, extras):
941 941 log.debug("Merging pull request %s", pull_request.pull_request_id)
942 942 extras['user_agent'] = 'internal-merge'
943 943 merge_state = self._merge_pull_request(pull_request, user, extras)
944 944 if merge_state.executed:
945 945 log.debug("Merge was successful, updating the pull request comments.")
946 946 self._comment_and_close_pr(pull_request, user, merge_state)
947 947
948 948 self._log_audit_action(
949 949 'repo.pull_request.merge',
950 950 {'merge_state': merge_state.__dict__},
951 951 user, pull_request)
952 952
953 953 else:
954 954 log.warn("Merge failed, not updating the pull request.")
955 955 return merge_state
956 956
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual vcs-level merge of the pull request into its
        target, running inside a hook callback daemon.

        :param pull_request: pull request to merge
        :param user: user whose name/email are used for the merge commit
        :param extras: hook environment; serialized into the repo config
        :param merge_msg: optional override of the merge commit message
            template (`vcs_settings.MERGE_MESSAGE_TMPL`)
        :returns: merge state object returned by `target_vcs.merge`
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # build the merge commit message from the configured template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the local target ref is up to date before merging
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
997 997
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision on the pull
        request, leave a closing comment, invalidate target-repo caches and
        fire the 'merge' hook.

        :param merge_state: vcs merge result; `merge_ref.commit_id` becomes
            the stored merge revision
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
1019 1019
1020 1020 def has_valid_update_type(self, pull_request):
1021 1021 source_ref_type = pull_request.source_ref_parts.type
1022 1022 return source_ref_type in self.REF_TYPES
1023 1023
1024 1024 def get_flow_commits(self, pull_request):
1025 1025
1026 1026 # source repo
1027 1027 source_ref_name = pull_request.source_ref_parts.name
1028 1028 source_ref_type = pull_request.source_ref_parts.type
1029 1029 source_ref_id = pull_request.source_ref_parts.commit_id
1030 1030 source_repo = pull_request.source_repo.scm_instance()
1031 1031
1032 1032 try:
1033 1033 if source_ref_type in self.REF_TYPES:
1034 1034 source_commit = source_repo.get_commit(
1035 1035 source_ref_name, reference_obj=pull_request.source_ref_parts)
1036 1036 else:
1037 1037 source_commit = source_repo.get_commit(source_ref_id)
1038 1038 except CommitDoesNotExistError:
1039 1039 raise SourceRefMissing()
1040 1040
1041 1041 # target repo
1042 1042 target_ref_name = pull_request.target_ref_parts.name
1043 1043 target_ref_type = pull_request.target_ref_parts.type
1044 1044 target_ref_id = pull_request.target_ref_parts.commit_id
1045 1045 target_repo = pull_request.target_repo.scm_instance()
1046 1046
1047 1047 try:
1048 1048 if target_ref_type in self.REF_TYPES:
1049 1049 target_commit = target_repo.get_commit(
1050 1050 target_ref_name, reference_obj=pull_request.target_ref_parts)
1051 1051 else:
1052 1052 target_commit = target_repo.get_commit(target_ref_id)
1053 1053 except CommitDoesNotExistError:
1054 1054 raise TargetRefMissing()
1055 1055
1056 1056 return source_commit, target_commit
1057 1057
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action.

        :param pull_request: pull request (object or id) to update
        :param updating_user: the user_object who triggered the update
        :returns: `UpdateResponse` describing what happened
        """
        pull_request = self.__get_pull_request(pull_request)

        # refs as currently stored on the pull request (pre-update)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # a side "changed" when its stored ref id no longer matches the
        # freshly resolved head commit
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                # NOTE(review): these two kwargs look swapped; both values
                # are False in this branch so behavior is unaffected —
                # confirm before relying on them
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1232 1238
1233 1239 def _create_version_from_snapshot(self, pull_request):
1234 1240 version = PullRequestVersion()
1235 1241 version.title = pull_request.title
1236 1242 version.description = pull_request.description
1237 1243 version.status = pull_request.status
1238 1244 version.pull_request_state = pull_request.pull_request_state
1239 1245 version.created_on = datetime.datetime.now()
1240 1246 version.updated_on = pull_request.updated_on
1241 1247 version.user_id = pull_request.user_id
1242 1248 version.source_repo = pull_request.source_repo
1243 1249 version.source_ref = pull_request.source_ref
1244 1250 version.target_repo = pull_request.target_repo
1245 1251 version.target_ref = pull_request.target_ref
1246 1252
1247 1253 version._last_merge_source_rev = pull_request._last_merge_source_rev
1248 1254 version._last_merge_target_rev = pull_request._last_merge_target_rev
1249 1255 version.last_merge_status = pull_request.last_merge_status
1250 1256 version.last_merge_metadata = pull_request.last_merge_metadata
1251 1257 version.shadow_merge_ref = pull_request.shadow_merge_ref
1252 1258 version.merge_rev = pull_request.merge_rev
1253 1259 version.reviewer_data = pull_request.reviewer_data
1254 1260
1255 1261 version.revisions = pull_request.revisions
1256 1262 version.common_ancestor_id = pull_request.common_ancestor_id
1257 1263 version.pull_request = pull_request
1258 1264 Session().add(version)
1259 1265 Session().flush()
1260 1266
1261 1267 return version
1262 1268
1263 1269 def _generate_update_diffs(self, pull_request, pull_request_version):
1264 1270
1265 1271 diff_context = (
1266 1272 self.DIFF_CONTEXT +
1267 1273 CommentsModel.needed_extra_diff_context())
1268 1274 hide_whitespace_changes = False
1269 1275 source_repo = pull_request_version.source_repo
1270 1276 source_ref_id = pull_request_version.source_ref_parts.commit_id
1271 1277 target_ref_id = pull_request_version.target_ref_parts.commit_id
1272 1278 old_diff = self._get_diff_from_pr_or_version(
1273 1279 source_repo, source_ref_id, target_ref_id,
1274 1280 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1275 1281
1276 1282 source_repo = pull_request.source_repo
1277 1283 source_ref_id = pull_request.source_ref_parts.commit_id
1278 1284 target_ref_id = pull_request.target_ref_parts.commit_id
1279 1285
1280 1286 new_diff = self._get_diff_from_pr_or_version(
1281 1287 source_repo, source_ref_id, target_ref_id,
1282 1288 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 1289
1284 1290 old_diff_data = diffs.DiffProcessor(old_diff)
1285 1291 old_diff_data.prepare()
1286 1292 new_diff_data = diffs.DiffProcessor(new_diff)
1287 1293 new_diff_data.prepare()
1288 1294
1289 1295 return old_diff_data, new_diff_data
1290 1296
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        Only comments whose `pull_request_version` is still NULL are
        touched, so comments already bound to an older version stay put.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1316 1322
1317 1323 def _calculate_commit_id_changes(self, old_ids, new_ids):
1318 1324 added = [x for x in new_ids if x not in old_ids]
1319 1325 common = [x for x in new_ids if x in old_ids]
1320 1326 removed = [x for x in old_ids if x not in new_ids]
1321 1327 total = new_ids
1322 1328 return ChangeTuple(added, common, removed, total)
1323 1329
1324 1330 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1325 1331
1326 1332 old_files = OrderedDict()
1327 1333 for diff_data in old_diff_data.parsed_diff:
1328 1334 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1329 1335
1330 1336 added_files = []
1331 1337 modified_files = []
1332 1338 removed_files = []
1333 1339 for diff_data in new_diff_data.parsed_diff:
1334 1340 new_filename = diff_data['filename']
1335 1341 new_hash = md5_safe(diff_data['raw_diff'])
1336 1342
1337 1343 old_hash = old_files.get(new_filename)
1338 1344 if not old_hash:
1339 1345 # file is not present in old diff, we have to figure out from parsed diff
1340 1346 # operation ADD/REMOVE
1341 1347 operations_dict = diff_data['stats']['ops']
1342 1348 if diffs.DEL_FILENODE in operations_dict:
1343 1349 removed_files.append(new_filename)
1344 1350 else:
1345 1351 added_files.append(new_filename)
1346 1352 else:
1347 1353 if new_hash != old_hash:
1348 1354 modified_files.append(new_filename)
1349 1355 # now remove a file from old, since we have seen it already
1350 1356 del old_files[new_filename]
1351 1357
1352 1358 # removed files is when there are present in old, but not in NEW,
1353 1359 # since we remove old files that are present in new diff, left-overs
1354 1360 # if any should be the removed files
1355 1361 removed_files.extend(old_files.keys())
1356 1362
1357 1363 return FileChangeTuple(added_files, modified_files, removed_files)
1358 1364
1359 1365 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1360 1366 """
1361 1367 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1362 1368 so it's always looking the same disregarding on which default
1363 1369 renderer system is using.
1364 1370
1365 1371 :param ancestor_commit_id: ancestor raw_id
1366 1372 :param changes: changes named tuple
1367 1373 :param file_changes: file changes named tuple
1368 1374
1369 1375 """
1370 1376 new_status = ChangesetStatus.get_status_lbl(
1371 1377 ChangesetStatus.STATUS_UNDER_REVIEW)
1372 1378
1373 1379 changed_files = (
1374 1380 file_changes.added + file_changes.modified + file_changes.removed)
1375 1381
1376 1382 params = {
1377 1383 'under_review_label': new_status,
1378 1384 'added_commits': changes.added,
1379 1385 'removed_commits': changes.removed,
1380 1386 'changed_files': changed_files,
1381 1387 'added_files': file_changes.added,
1382 1388 'modified_files': file_changes.modified,
1383 1389 'removed_files': file_changes.removed,
1384 1390 'ancestor_commit_id': ancestor_commit_id
1385 1391 }
1386 1392 renderer = RstTemplateRenderer()
1387 1393 return renderer.render('pull_request_update.mako', **params)
1388 1394
1389 1395 def edit(self, pull_request, title, description, description_renderer, user):
1390 1396 pull_request = self.__get_pull_request(pull_request)
1391 1397 old_data = pull_request.get_api_data(with_merge_state=False)
1392 1398 if pull_request.is_closed():
1393 1399 raise ValueError('This pull request is closed')
1394 1400 if title:
1395 1401 pull_request.title = title
1396 1402 pull_request.description = description
1397 1403 pull_request.updated_on = datetime.datetime.now()
1398 1404 pull_request.description_renderer = description_renderer
1399 1405 Session().add(pull_request)
1400 1406 self._log_audit_action(
1401 1407 'repo.pull_request.edit', {'old_data': old_data},
1402 1408 user, pull_request)
1403 1409
1404 1410 def update_reviewers(self, pull_request, reviewer_data, user):
1405 1411 """
1406 1412 Update the reviewers in the pull request
1407 1413
1408 1414 :param pull_request: the pr to update
1409 1415 :param reviewer_data: list of tuples
1410 1416 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1411 1417 :param user: current use who triggers this action
1412 1418 """
1413 1419
1414 1420 pull_request = self.__get_pull_request(pull_request)
1415 1421 if pull_request.is_closed():
1416 1422 raise ValueError('This pull request is closed')
1417 1423
1418 1424 reviewers = {}
1419 1425 for user_id, reasons, mandatory, role, rules in reviewer_data:
1420 1426 if isinstance(user_id, (int, compat.string_types)):
1421 1427 user_id = self._get_user(user_id).user_id
1422 1428 reviewers[user_id] = {
1423 1429 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1424 1430
1425 1431 reviewers_ids = set(reviewers.keys())
1426 1432 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1427 1433 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1428 1434
1429 1435 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1430 1436
1431 1437 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1432 1438 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1433 1439
1434 1440 log.debug("Adding %s reviewers", ids_to_add)
1435 1441 log.debug("Removing %s reviewers", ids_to_remove)
1436 1442 changed = False
1437 1443 added_audit_reviewers = []
1438 1444 removed_audit_reviewers = []
1439 1445
1440 1446 for uid in ids_to_add:
1441 1447 changed = True
1442 1448 _usr = self._get_user(uid)
1443 1449 reviewer = PullRequestReviewers()
1444 1450 reviewer.user = _usr
1445 1451 reviewer.pull_request = pull_request
1446 1452 reviewer.reasons = reviewers[uid]['reasons']
1447 1453 # NOTE(marcink): mandatory shouldn't be changed now
1448 1454 # reviewer.mandatory = reviewers[uid]['reasons']
1449 1455 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1450 1456 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1451 1457 Session().add(reviewer)
1452 1458 added_audit_reviewers.append(reviewer.get_dict())
1453 1459
1454 1460 for uid in ids_to_remove:
1455 1461 changed = True
1456 1462 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1457 1463 # This is an edge case that handles previous state of having the same reviewer twice.
1458 1464 # this CAN happen due to the lack of DB checks
1459 1465 reviewers = PullRequestReviewers.query()\
1460 1466 .filter(PullRequestReviewers.user_id == uid,
1461 1467 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1462 1468 PullRequestReviewers.pull_request == pull_request)\
1463 1469 .all()
1464 1470
1465 1471 for obj in reviewers:
1466 1472 added_audit_reviewers.append(obj.get_dict())
1467 1473 Session().delete(obj)
1468 1474
1469 1475 if changed:
1470 1476 Session().expire_all()
1471 1477 pull_request.updated_on = datetime.datetime.now()
1472 1478 Session().add(pull_request)
1473 1479
1474 1480 # finally store audit logs
1475 1481 for user_data in added_audit_reviewers:
1476 1482 self._log_audit_action(
1477 1483 'repo.pull_request.reviewer.add', {'data': user_data},
1478 1484 user, pull_request)
1479 1485 for user_data in removed_audit_reviewers:
1480 1486 self._log_audit_action(
1481 1487 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1482 1488 user, pull_request)
1483 1489
1484 1490 self.notify_reviewers(pull_request, ids_to_add, user)
1485 1491 return ids_to_add, ids_to_remove
1486 1492
1487 1493 def update_observers(self, pull_request, observer_data, user):
1488 1494 """
1489 1495 Update the observers in the pull request
1490 1496
1491 1497 :param pull_request: the pr to update
1492 1498 :param observer_data: list of tuples
1493 1499 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1494 1500 :param user: current use who triggers this action
1495 1501 """
1496 1502 pull_request = self.__get_pull_request(pull_request)
1497 1503 if pull_request.is_closed():
1498 1504 raise ValueError('This pull request is closed')
1499 1505
1500 1506 observers = {}
1501 1507 for user_id, reasons, mandatory, role, rules in observer_data:
1502 1508 if isinstance(user_id, (int, compat.string_types)):
1503 1509 user_id = self._get_user(user_id).user_id
1504 1510 observers[user_id] = {
1505 1511 'reasons': reasons, 'observers': mandatory, 'role': role}
1506 1512
1507 1513 observers_ids = set(observers.keys())
1508 1514 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1509 1515 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1510 1516
1511 1517 current_observers_ids = set([x.user.user_id for x in current_observers])
1512 1518
1513 1519 ids_to_add = observers_ids.difference(current_observers_ids)
1514 1520 ids_to_remove = current_observers_ids.difference(observers_ids)
1515 1521
1516 1522 log.debug("Adding %s observer", ids_to_add)
1517 1523 log.debug("Removing %s observer", ids_to_remove)
1518 1524 changed = False
1519 1525 added_audit_observers = []
1520 1526 removed_audit_observers = []
1521 1527
1522 1528 for uid in ids_to_add:
1523 1529 changed = True
1524 1530 _usr = self._get_user(uid)
1525 1531 observer = PullRequestReviewers()
1526 1532 observer.user = _usr
1527 1533 observer.pull_request = pull_request
1528 1534 observer.reasons = observers[uid]['reasons']
1529 1535 # NOTE(marcink): mandatory shouldn't be changed now
1530 1536 # observer.mandatory = observer[uid]['reasons']
1531 1537
1532 1538 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1533 1539 observer.role = PullRequestReviewers.ROLE_OBSERVER
1534 1540 Session().add(observer)
1535 1541 added_audit_observers.append(observer.get_dict())
1536 1542
1537 1543 for uid in ids_to_remove:
1538 1544 changed = True
1539 1545 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1540 1546 # This is an edge case that handles previous state of having the same reviewer twice.
1541 1547 # this CAN happen due to the lack of DB checks
1542 1548 observers = PullRequestReviewers.query()\
1543 1549 .filter(PullRequestReviewers.user_id == uid,
1544 1550 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1545 1551 PullRequestReviewers.pull_request == pull_request)\
1546 1552 .all()
1547 1553
1548 1554 for obj in observers:
1549 1555 added_audit_observers.append(obj.get_dict())
1550 1556 Session().delete(obj)
1551 1557
1552 1558 if changed:
1553 1559 Session().expire_all()
1554 1560 pull_request.updated_on = datetime.datetime.now()
1555 1561 Session().add(pull_request)
1556 1562
1557 1563 # finally store audit logs
1558 1564 for user_data in added_audit_observers:
1559 1565 self._log_audit_action(
1560 1566 'repo.pull_request.observer.add', {'data': user_data},
1561 1567 user, pull_request)
1562 1568 for user_data in removed_audit_observers:
1563 1569 self._log_audit_action(
1564 1570 'repo.pull_request.observer.delete', {'old_data': user_data},
1565 1571 user, pull_request)
1566 1572
1567 1573 self.notify_observers(pull_request, ids_to_add, user)
1568 1574 return ids_to_add, ids_to_remove
1569 1575
1570 1576 def get_url(self, pull_request, request=None, permalink=False):
1571 1577 if not request:
1572 1578 request = get_current_request()
1573 1579
1574 1580 if permalink:
1575 1581 return request.route_url(
1576 1582 'pull_requests_global',
1577 1583 pull_request_id=pull_request.pull_request_id,)
1578 1584 else:
1579 1585 return request.route_url('pullrequest_show',
1580 1586 repo_name=safe_str(pull_request.target_repo.repo_name),
1581 1587 pull_request_id=pull_request.pull_request_id,)
1582 1588
1583 1589 def get_shadow_clone_url(self, pull_request, request=None):
1584 1590 """
1585 1591 Returns qualified url pointing to the shadow repository. If this pull
1586 1592 request is closed there is no shadow repository and ``None`` will be
1587 1593 returned.
1588 1594 """
1589 1595 if pull_request.is_closed():
1590 1596 return None
1591 1597 else:
1592 1598 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1593 1599 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1594 1600
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """Create in-app notifications and emails for newly added reviewers/observers.

        :param pull_request: the pull request the users were attached to
        :param user_ids: recipient user ids; silently no-op when empty
        :param role: ``PullRequestReviewers.ROLE_*`` value; forwarded to the
            email template to pick the right wording
        :param user: acting user, recorded as the notification creator
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # the pull request is always shown in context of its *target* repo
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        # NOTE(review): these keys are consumed by the email templates —
        # renaming any of them would silently break the rendered emails
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            # thread_ids groups related emails into one mail thread per PR
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1653 1659
1654 1660 def notify_reviewers(self, pull_request, reviewers_ids, user):
1655 1661 return self._notify_reviewers(pull_request, reviewers_ids,
1656 1662 PullRequestReviewers.ROLE_REVIEWER, user)
1657 1663
1658 1664 def notify_observers(self, pull_request, observers_ids, user):
1659 1665 return self._notify_reviewers(pull_request, observers_ids,
1660 1666 PullRequestReviewers.ROLE_OBSERVER, user)
1661 1667
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """Email all PR reviewers (except the updater) about a pull-request update.

        :param pull_request: the updated pull request
        :param updating_user: user who performed the update; excluded from
            the recipients
        :param ancestor_commit_id: raw id of the common ancestor commit
        :param commit_changes: named tuple with added/removed commit ids
        :param file_changes: named tuple with added/modified/removed files
        """

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # the pull request is always shown in context of its *target* repo
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # NOTE(review): these keys are consumed by the update email template —
        # renaming any of them would silently break the rendered emails
        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            # thread_ids groups update emails into the PR's mail thread
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1726 1732
1727 1733 def delete(self, pull_request, user=None):
1728 1734 if not user:
1729 1735 user = getattr(get_current_rhodecode_user(), 'username', None)
1730 1736
1731 1737 pull_request = self.__get_pull_request(pull_request)
1732 1738 old_data = pull_request.get_api_data(with_merge_state=False)
1733 1739 self._cleanup_merge_workspace(pull_request)
1734 1740 self._log_audit_action(
1735 1741 'repo.pull_request.delete', {'old_data': old_data},
1736 1742 user, pull_request)
1737 1743 Session().delete(pull_request)
1738 1744
1739 1745 def close_pull_request(self, pull_request, user):
1740 1746 pull_request = self.__get_pull_request(pull_request)
1741 1747 self._cleanup_merge_workspace(pull_request)
1742 1748 pull_request.status = PullRequest.STATUS_CLOSED
1743 1749 pull_request.updated_on = datetime.datetime.now()
1744 1750 Session().add(pull_request)
1745 1751 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1746 1752
1747 1753 pr_data = pull_request.get_api_data(with_merge_state=False)
1748 1754 self._log_audit_action(
1749 1755 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1750 1756
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """Close a pull request, leaving a status-changing comment.

        The resulting status is APPROVED only when the PR already has voting
        consent; otherwise it is closed as REJECTED. Returns
        ``(comment, status)``.

        :param pull_request: pull request to close
        :param user: user performing the close; their vote is recorded
        :param repo: repository the closing comment is attached to
        :param message: optional comment text; defaults to a canned
            "Closing with status change" message
        :param auth_user: authenticated user passed to the comment model
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the fresh comment/status are visible to the hook consumers
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1808 1814
1809 1815 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1810 1816 _ = translator or get_current_request().translate
1811 1817
1812 1818 if not self._is_merge_enabled(pull_request):
1813 1819 return None, False, _('Server-side pull request merging is disabled.')
1814 1820
1815 1821 if pull_request.is_closed():
1816 1822 return None, False, _('This pull request is closed.')
1817 1823
1818 1824 merge_possible, msg = self._check_repo_requirements(
1819 1825 target=pull_request.target_repo, source=pull_request.source_repo,
1820 1826 translator=_)
1821 1827 if not merge_possible:
1822 1828 return None, merge_possible, msg
1823 1829
1824 1830 try:
1825 1831 merge_response = self._try_merge(
1826 1832 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1827 1833 log.debug("Merge response: %s", merge_response)
1828 1834 return merge_response, merge_response.possible, merge_response.merge_status_message
1829 1835 except NotImplementedError:
1830 1836 return None, False, _('Pull request merging is not supported.')
1831 1837
1832 1838 def _check_repo_requirements(self, target, source, translator):
1833 1839 """
1834 1840 Check if `target` and `source` have compatible requirements.
1835 1841
1836 1842 Currently this is just checking for largefiles.
1837 1843 """
1838 1844 _ = translator
1839 1845 target_has_largefiles = self._has_largefiles(target)
1840 1846 source_has_largefiles = self._has_largefiles(source)
1841 1847 merge_possible = True
1842 1848 message = u''
1843 1849
1844 1850 if target_has_largefiles != source_has_largefiles:
1845 1851 merge_possible = False
1846 1852 if source_has_largefiles:
1847 1853 message = _(
1848 1854 'Target repository large files support is disabled.')
1849 1855 else:
1850 1856 message = _(
1851 1857 'Source repository large files support is disabled.')
1852 1858
1853 1859 return merge_possible, message
1854 1860
1855 1861 def _has_largefiles(self, repo):
1856 1862 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1857 1863 'extensions', 'largefiles')
1858 1864 return largefiles_ui and largefiles_ui[0].active
1859 1865
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        :param pull_request: pull request to evaluate
        :param force_shadow_repo_refresh: when True, always re-run the dry-run
            merge instead of reusing the cached merge state
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) -> merge impossible
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            # a locked target repository can never be merged into
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            # refs moved since last check (or refresh forced): dry-run again
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # nothing changed since the last evaluation -> reuse cached state
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1913 1919
1914 1920 def _refresh_reference(self, reference, vcs_repository):
1915 1921 if reference.type in self.UPDATABLE_REF_TYPES:
1916 1922 name_or_id = reference.name
1917 1923 else:
1918 1924 name_or_id = reference.commit_id
1919 1925
1920 1926 refreshed_commit = vcs_repository.get_commit(name_or_id)
1921 1927 refreshed_reference = Reference(
1922 1928 reference.type, reference.name, refreshed_commit.raw_id)
1923 1929 return refreshed_reference
1924 1930
1925 1931 def _needs_merge_state_refresh(self, pull_request, target_reference):
1926 1932 return not(
1927 1933 pull_request.revisions and
1928 1934 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1929 1935 target_reference.commit_id == pull_request._last_merge_target_rev)
1930 1936
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """Run a dry-run merge and persist its outcome on the pull request.

        Caches the evaluated source/target revisions, the merge status and
        metadata, plus the shadow merge ref, then commits the session — so
        later calls can reuse the state via ``_needs_merge_state_refresh``.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        # dry_run=True: evaluate mergeability without mutating the target repo
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1956 1962
1957 1963 def _workspace_id(self, pull_request):
1958 1964 workspace_id = 'pr-%s' % pull_request.pull_request_id
1959 1965 return workspace_id
1960 1966
1961 1967 def generate_repo_data(self, repo, commit_id=None, branch=None,
1962 1968 bookmark=None, translator=None):
1963 1969 from rhodecode.model.repo import RepoModel
1964 1970
1965 1971 all_refs, selected_ref = \
1966 1972 self._get_repo_pullrequest_sources(
1967 1973 repo.scm_instance(), commit_id=commit_id,
1968 1974 branch=branch, bookmark=bookmark, translator=translator)
1969 1975
1970 1976 refs_select2 = []
1971 1977 for element in all_refs:
1972 1978 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1973 1979 refs_select2.append({'text': element[1], 'children': children})
1974 1980
1975 1981 return {
1976 1982 'user': {
1977 1983 'user_id': repo.user.user_id,
1978 1984 'username': repo.user.username,
1979 1985 'firstname': repo.user.first_name,
1980 1986 'lastname': repo.user.last_name,
1981 1987 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1982 1988 },
1983 1989 'name': repo.repo_name,
1984 1990 'link': RepoModel().get_url(repo),
1985 1991 'description': h.chop_at_smart(repo.description_safe, '\n'),
1986 1992 'refs': {
1987 1993 'all_refs': all_refs,
1988 1994 'selected_ref': selected_ref,
1989 1995 'select2_refs': refs_select2
1990 1996 }
1991 1997 }
1992 1998
1993 1999 def generate_pullrequest_title(self, source, source_ref, target):
1994 2000 return u'{source}#{at_ref} to {target}'.format(
1995 2001 source=source,
1996 2002 at_ref=source_ref,
1997 2003 target=target,
1998 2004 )
1999 2005
2000 2006 def _cleanup_merge_workspace(self, pull_request):
2001 2007 # Merging related cleanup
2002 2008 repo_id = pull_request.target_repo.repo_id
2003 2009 target_scm = pull_request.target_repo.scm_instance()
2004 2010 workspace_id = self._workspace_id(pull_request)
2005 2011
2006 2012 try:
2007 2013 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2008 2014 except NotImplementedError:
2009 2015 pass
2010 2016
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected

        :returns: ``(groups, selected)`` where `groups` is a list of
            ``(group_refs, group_name)`` and `selected` is a
            ``type:name:commit_id`` ref key (or None when the user must pick)
        :raises CommitDoesNotExistError: when an explicit ref was requested
            but nothing matched it
        :raises EmptyRepositoryError: when the repository has no commits
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref keys have the form "type:name:commit_id"
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                # first match wins; keep whatever was selected earlier
                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # caller asked for a specific ref but nothing matched it
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # no explicit request: default to the repo's default branch
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
2076 2082
2077 2083 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2078 2084 hide_whitespace_changes, diff_context):
2079 2085
2080 2086 return self._get_diff_from_pr_or_version(
2081 2087 source_repo, source_ref_id, target_ref_id,
2082 2088 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2083 2089
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the vcs diff between target_ref_id and source_ref_id.

        :param source_repo: a db `Repository` object or a vcs repo instance
            (both are accepted; see the isinstance check below)
        :param source_ref_id: commit id of the source side of the diff
        :param target_ref_id: commit id of the target side of the diff
        :param hide_whitespace_changes: forwarded to vcs as ignore_whitespace
        :param diff_context: number of context lines forwarded to vcs
        :returns: the raw vcs diff object from ``vcs_repo.get_diff``
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        # maybe_unreachable: the source commit may no longer be reachable
        # after a PR update; still try to resolve it
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
2114 2120
2115 2121 def _is_merge_enabled(self, pull_request):
2116 2122 return self._get_general_setting(
2117 2123 pull_request, 'rhodecode_pr_merge_enabled')
2118 2124
2119 2125 def _use_rebase_for_merging(self, pull_request):
2120 2126 repo_type = pull_request.target_repo.repo_type
2121 2127 if repo_type == 'hg':
2122 2128 return self._get_general_setting(
2123 2129 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2124 2130 elif repo_type == 'git':
2125 2131 return self._get_general_setting(
2126 2132 pull_request, 'rhodecode_git_use_rebase_for_merging')
2127 2133
2128 2134 return False
2129 2135
2130 2136 def _user_name_for_merging(self, pull_request, user):
2131 2137 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2132 2138 if env_user_name_attr and hasattr(user, env_user_name_attr):
2133 2139 user_name_attr = env_user_name_attr
2134 2140 else:
2135 2141 user_name_attr = 'short_contact'
2136 2142
2137 2143 user_name = getattr(user, user_name_attr)
2138 2144 return user_name
2139 2145
2140 2146 def _close_branch_before_merging(self, pull_request):
2141 2147 repo_type = pull_request.target_repo.repo_type
2142 2148 if repo_type == 'hg':
2143 2149 return self._get_general_setting(
2144 2150 pull_request, 'rhodecode_hg_close_branch_before_merging')
2145 2151 elif repo_type == 'git':
2146 2152 return self._get_general_setting(
2147 2153 pull_request, 'rhodecode_git_close_branch_before_merging')
2148 2154
2149 2155 return False
2150 2156
2151 2157 def _get_general_setting(self, pull_request, settings_key, default=False):
2152 2158 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2153 2159 settings = settings_model.get_general_settings()
2154 2160 return settings.get(settings_key, default)
2155 2161
2156 2162 def _log_audit_action(self, action, action_data, user, pull_request):
2157 2163 audit_logger.store(
2158 2164 action=action,
2159 2165 action_data=action_data,
2160 2166 user=user,
2161 2167 repo=pull_request.target_repo)
2162 2168
2163 2169 def get_reviewer_functions(self):
2164 2170 """
2165 2171 Fetches functions for validation and fetching default reviewers.
2166 2172 If available we use the EE package, else we fallback to CE
2167 2173 package functions
2168 2174 """
2169 2175 try:
2170 2176 from rc_reviewers.utils import get_default_reviewers_data
2171 2177 from rc_reviewers.utils import validate_default_reviewers
2172 2178 from rc_reviewers.utils import validate_observers
2173 2179 except ImportError:
2174 2180 from rhodecode.apps.repository.utils import get_default_reviewers_data
2175 2181 from rhodecode.apps.repository.utils import validate_default_reviewers
2176 2182 from rhodecode.apps.repository.utils import validate_observers
2177 2183
2178 2184 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2179 2185
2180 2186
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used to index error_details by check type
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the PR (set by validate())
        self.review_status = None
        # boolean-ish merge_status returned by merge_status()
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # None until the first push_error(), then True
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # check-key -> dict(details, error_type, message); ordered
        self.error_details = OrderedDict()
        # source/target flow-commit info filled by validate()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check: marks the whole check as failed and stores
        the error under *error_key* in error_details.
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for *pull_request* and return a MergeCheck.

        Checks run in order: WIP title marker, merge permission, target
        branch rules, review status, unresolved TODOs, and finally the
        actual merge simulation.

        :param fail_early: return right after the first failed check
            instead of collecting all errors
        :param force_shadow_repo_refresh: forwarded to merge_status()
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        # approval is only required when the PR actually has reviewers
        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether either side moved since the PR refs were stored
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # refs gone entirely; leave source/target commit info unset
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return a dict describing how the merge would be performed
        (merge strategy, and whether the source branch gets closed/deleted),
        based on the target repo's general VCS settings.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2366 2372
2367 2373
# commit-set delta between two PR versions
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# file-level delta between two PR versions
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
General Comments 0
You need to be logged in to leave comments. Login now