##// END OF EJS Templates
pull-requests: limit the amount of data saved in default reviewers data for better memory usage...
marcink -
r4509:b5299f6d stable
parent child Browse files
Show More
@@ -1,95 +1,111 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 from rhodecode.lib import helpers as h
21 from rhodecode.lib import helpers as h, rc_cache
22 22 from rhodecode.lib.utils2 import safe_int
23 23 from rhodecode.model.pull_request import get_diff_info
24 24 from rhodecode.model.db import PullRequestReviewers
25 25 # V3 - Reviewers, with default rules data
26 26 # v4 - Added observers metadata
27 27 REVIEWER_API_VERSION = 'V4'
28 28
29 29
def reviewer_as_json(user, reasons=None, role=None, mandatory=False, rules=None, user_group=None):
    """
    Returns json struct of a reviewer for frontend

    :param user: the reviewer (User-like object with user_id/username/... attrs)
    :param reasons: list of strings of why they are reviewers
    :param role: reviewer role, one of PullRequestReviewers.ROLES;
        defaults to ROLE_REVIEWER when not given
    :param mandatory: bool, to set user as mandatory
    :param rules: optional list of rules that matched this reviewer
    :param user_group: optional user-group rule data for this reviewer
    :raises ValueError: if `role` is not one of PullRequestReviewers.ROLES
    """
    role = role or PullRequestReviewers.ROLE_REVIEWER
    if role not in PullRequestReviewers.ROLES:
        # NOTE: interpolate explicitly; the original passed the roles as a
        # second argument (logging-style), so the message never actually
        # contained the allowed roles.
        raise ValueError('role is not one of %s' % (PullRequestReviewers.ROLES,))

    return {
        'user_id': user.user_id,
        'reasons': reasons or [],
        'rules': rules or [],
        'role': role,
        'mandatory': mandatory,
        'user_group': user_group,
        'username': user.username,
        'first_name': user.first_name,
        'last_name': user.last_name,
        'user_link': h.link_to_user(user),
        'gravatar_link': h.gravatar_url(user.email, 14),
    }
55 55
56 56
57 def get_default_reviewers_data(current_user, source_repo, source_commit, target_repo, target_commit):
def to_reviewers(e):
    """Convert a reviewer entity, or a sequence of them, to json struct(s)."""
    if not isinstance(e, (tuple, list)):
        return reviewer_as_json(e)
    return map(reviewer_as_json, e)
63
def get_default_reviewers_data(current_user, source_repo, source_ref, target_repo, target_ref,
                               include_diff_info=True):
    """
    Return json for default reviewers of a repository
    """

    diff_info = {}
    if include_diff_info:
        diff_info = get_diff_info(
            source_repo, source_ref.commit_id, target_repo, target_ref.commit_id)

    default_reasons = ['Default reviewer', 'Repository owner']
    json_reviewers = [reviewer_as_json(
        user=target_repo.user, reasons=default_reasons, mandatory=False,
        rules=None, role=None)]

    # stable key identifying this exact source/target state, used by callers
    # to detect when cached reviewer data is still valid
    compute_key = rc_cache.utils.compute_key_from_params(
        current_user.user_id, source_repo.repo_id, source_ref.type, source_ref.name,
        source_ref.commit_id, target_repo.repo_id, target_ref.type, target_ref.name,
        target_ref.commit_id)

    return {
        'api_ver': REVIEWER_API_VERSION,  # define version for later possible schema upgrade
        'compute_key': compute_key,
        'diff_info': diff_info,
        'reviewers': json_reviewers,
        'rules': {},
        'rules_data': {},
    }
76 92
77 93
def validate_default_reviewers(review_members, reviewer_rules):
    """
    Function to validate submitted reviewers against the saved rules

    :param review_members: iterable of dicts with keys
        user_id/reasons/mandatory/role/rules
    :param reviewer_rules: saved rules to validate against
        (not consulted by the current implementation)
    :return: list of (user_id, reasons, mandatory, role, rules) tuples
    """
    reviewers = []
    for r in review_members:
        reviewer_user_id = safe_int(r['user_id'])
        entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['role'], r['rules'])
        # NOTE: a reviewer_by_id mapping was previously built here as well,
        # but it was never read anywhere; dropped as dead code.
        reviewers.append(entry)

    return reviewers
92 108
93 109
def validate_observers(observer_members):
    """Validate submitted observers; no observer rules exist yet, so none pass."""
    return {}
@@ -1,1806 +1,1811 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason, Reference
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 47 from rhodecode.model.comment import CommentsModel
48 48 from rhodecode.model.db import (
49 49 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
50 50 PullRequestReviewers)
51 51 from rhodecode.model.forms import PullRequestForm
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
54 54 from rhodecode.model.scm import ScmModel
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class RepoPullRequestsView(RepoAppView, DataGridAppView):
60 60
61 61 def load_default_context(self):
62 62 c = self._get_local_tmpl_context(include_app_defaults=True)
63 63 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
64 64 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
65 65 # backward compat., we use for OLD PRs a plain renderer
66 66 c.renderer = 'plain'
67 67 return c
68 68
    def _get_pull_requests_list(
            self, repo_name, source, filter_type, opened_by, statuses):
        """
        Fetch one page of pull requests and render it into the dict
        structure the frontend datagrid (DataTables) expects.

        :param repo_name: name of the repository we list pull requests for
        :param source: bool flag, include PRs where this repo is the source
        :param filter_type: 'awaiting_review', 'awaiting_my_review' or None
            (None means list all)
        :param opened_by: optional list of author user ids to filter by
        :param statuses: list of PR statuses to include
        """

        # chunking/ordering parameters come from the datagrid request
        draw, start, limit = self._extract_chunk(self.request)
        search_q, order_by, order_dir = self._extract_ordering(self.request)
        _render = self.request.get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')

        # pagination

        if filter_type == 'awaiting_review':
            pull_requests = PullRequestModel().get_awaiting_review(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                statuses=statuses, offset=start, length=limit,
                order_by=order_by, order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_awaiting_review(
                repo_name, search_q=search_q, source=source, statuses=statuses,
                opened_by=opened_by)
        elif filter_type == 'awaiting_my_review':
            # restricted to PRs awaiting a review from the current user
            pull_requests = PullRequestModel().get_awaiting_my_review(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                user_id=self._rhodecode_user.user_id, statuses=statuses,
                offset=start, length=limit, order_by=order_by,
                order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
                repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
                statuses=statuses, opened_by=opened_by)
        else:
            pull_requests = PullRequestModel().get_all(
                repo_name, search_q=search_q, source=source, opened_by=opened_by,
                statuses=statuses, offset=start, length=limit,
                order_by=order_by, order_dir=order_dir)
            pull_requests_total_count = PullRequestModel().count_all(
                repo_name, search_q=search_q, source=source, statuses=statuses,
                opened_by=opened_by)

        data = []
        comments_model = CommentsModel()
        for pr in pull_requests:
            comments_count = comments_model.get_all_comments(
                self.db_repo.repo_id, pull_request=pr, count_only=True)

            data.append({
                'name': _render('pullrequest_name',
                                pr.pull_request_id, pr.pull_request_state,
                                pr.work_in_progress, pr.target_repo.repo_name),
                'name_raw': pr.pull_request_id,
                'status': _render('pullrequest_status',
                                  pr.calculated_review_status()),
                'title': _render('pullrequest_title', pr.title, pr.description),
                'description': h.escape(pr.description),
                'updated_on': _render('pullrequest_updated_on',
                                      h.datetime_to_time(pr.updated_on)),
                'updated_on_raw': h.datetime_to_time(pr.updated_on),
                # NOTE: 'created_on' reuses the 'pullrequest_updated_on'
                # renderer; only the timestamp differs
                'created_on': _render('pullrequest_updated_on',
                                      h.datetime_to_time(pr.created_on)),
                'created_on_raw': h.datetime_to_time(pr.created_on),
                'state': pr.pull_request_state,
                'author': _render('pullrequest_author',
                                  pr.author.full_contact, ),
                'author_raw': pr.author.full_name,
                'comments': _render('pullrequest_comments', comments_count),
                'comments_raw': comments_count,
                'closed': pr.is_closed(),
            })

        # envelope in the DataTables response shape
        data = ({
            'draw': draw,
            'data': data,
            'recordsTotal': pull_requests_total_count,
            'recordsFiltered': pull_requests_total_count,
        })
        return data
142 142
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_show_all', request_method='GET',
        renderer='rhodecode:templates/pullrequests/pullrequests.mako')
    def pull_request_list(self):
        """Render the pull-request listing page, selecting the active tab."""
        c = self.load_default_context()

        # boolean filter flags from the query string
        req_get = self.request.GET
        c.source = str2bool(req_get.get('source'))
        c.closed = str2bool(req_get.get('closed'))
        c.my = str2bool(req_get.get('my'))
        c.awaiting_review = str2bool(req_get.get('awaiting_review'))
        c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))

        # NOTE: order matters here — later conditions override earlier ones,
        # so e.g. awaiting_my_review wins over everything else
        c.active = 'open'
        if c.my:
            c.active = 'my'
        if c.closed:
            c.active = 'closed'
        if c.awaiting_review and not c.source:
            c.active = 'awaiting'
        if c.source and not c.awaiting_review:
            c.active = 'source'
        if c.awaiting_my_review:
            c.active = 'awaiting_my'

        return self._get_template_context(c)
172 172
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_show_all_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def pull_request_list_data(self):
        """Return the JSON payload backing the pull-request datagrid (XHR)."""
        self.load_default_context()

        # additional filters
        req_get = self.request.GET
        source = str2bool(req_get.get('source'))
        closed = str2bool(req_get.get('closed'))
        my = str2bool(req_get.get('my'))
        awaiting_review = str2bool(req_get.get('awaiting_review'))
        awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))

        # awaiting_review takes precedence over awaiting_my_review
        filter_type = 'awaiting_review' if awaiting_review \
            else 'awaiting_my_review' if awaiting_my_review \
            else None

        opened_by = None
        if my:
            opened_by = [self._rhodecode_user.user_id]

        # 'closed' replaces (not extends) the default open/new status set
        statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
        if closed:
            statuses = [PullRequest.STATUS_CLOSED]

        data = self._get_pull_requests_list(
            repo_name=self.db_repo_name, source=source,
            filter_type=filter_type, opened_by=opened_by, statuses=statuses)

        return data
207 207
208 208 def _is_diff_cache_enabled(self, target_repo):
209 209 caching_enabled = self._get_general_setting(
210 210 target_repo, 'rhodecode_diff_cache')
211 211 log.debug('Diff caching enabled: %s', caching_enabled)
212 212 return caching_enabled
213 213
    def _get_diffset(self, source_repo_name, source_repo,
                     ancestor_commit,
                     source_ref_id, target_ref_id,
                     target_commit, source_commit, diff_limit, file_limit,
                     fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
        """
        Compute and return the rendered DiffSet between the source and target
        refs of a pull request, honoring size limits and whitespace flags.
        """

        if use_ancestor:
            # we might want to not use it for versions
            target_ref_id = ancestor_commit.raw_id

        vcs_diff = PullRequestModel().get_diff(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context)

        diff_processor = diffs.DiffProcessor(
            vcs_diff, format='newdiff', diff_limit=diff_limit,
            file_limit=file_limit, show_full_diff=fulldiff)

        _parsed = diff_processor.prepare()

        # NOTE(review): target_commit feeds the *source* node getter (and vice
        # versa) — this mirrors the argument order of the render call below,
        # where target comes first; presumably intentional, confirm if touched
        diffset = codeblocks.DiffSet(
            repo_name=self.db_repo_name,
            source_repo_name=source_repo_name,
            source_node_getter=codeblocks.diffset_node_getter(target_commit),
            target_node_getter=codeblocks.diffset_node_getter(source_commit),
        )
        diffset = self.path_filter.render_patchset_filtered(
            diffset, _parsed, target_commit.raw_id, source_commit.raw_id)

        return diffset
244 244
245 245 def _get_range_diffset(self, source_scm, source_repo,
246 246 commit1, commit2, diff_limit, file_limit,
247 247 fulldiff, hide_whitespace_changes, diff_context):
248 248 vcs_diff = source_scm.get_diff(
249 249 commit1, commit2,
250 250 ignore_whitespace=hide_whitespace_changes,
251 251 context=diff_context)
252 252
253 253 diff_processor = diffs.DiffProcessor(
254 254 vcs_diff, format='newdiff', diff_limit=diff_limit,
255 255 file_limit=file_limit, show_full_diff=fulldiff)
256 256
257 257 _parsed = diff_processor.prepare()
258 258
259 259 diffset = codeblocks.DiffSet(
260 260 repo_name=source_repo.repo_name,
261 261 source_node_getter=codeblocks.diffset_node_getter(commit1),
262 262 target_node_getter=codeblocks.diffset_node_getter(commit2))
263 263
264 264 diffset = self.path_filter.render_patchset_filtered(
265 265 diffset, _parsed, commit1.raw_id, commit2.raw_id)
266 266
267 267 return diffset
268 268
    def register_comments_vars(self, c, pull_request, versions):
        """
        Load general and inline comments for *pull_request* into the template
        context *c*, aggregated per pull-request version.

        :return: tuple of (general_comments, inline_comments) queries
        """
        comments_model = CommentsModel()

        # GENERAL COMMENTS with versions #
        q = comments_model._all_general_comments_of_pull_request(pull_request)
        q = q.order_by(ChangesetComment.comment_id.asc())
        general_comments = q

        # pick comments we want to render at current version
        c.comment_versions = comments_model.aggregate_comments(
            general_comments, versions, c.at_version_num)

        # INLINE COMMENTS with versions #
        q = comments_model._all_inline_comments_of_pull_request(pull_request)
        q = q.order_by(ChangesetComment.comment_id.asc())
        inline_comments = q

        c.inline_versions = comments_model.aggregate_comments(
            inline_comments, versions, c.at_version_num, inline=True)

        # Comments inline+general
        # at a specific version show only that version's comments ('display');
        # at latest show everything up to now ('until')
        if c.at_version:
            c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
            c.comments = c.comment_versions[c.at_version_num]['display']
        else:
            c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
            c.comments = c.comment_versions[c.at_version_num]['until']

        return general_comments, inline_comments
298 298
299 299 @LoginRequired()
300 300 @HasRepoPermissionAnyDecorator(
301 301 'repository.read', 'repository.write', 'repository.admin')
302 302 @view_config(
303 303 route_name='pullrequest_show', request_method='GET',
304 304 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
305 305 def pull_request_show(self):
306 306 _ = self.request.translate
307 307 c = self.load_default_context()
308 308
309 309 pull_request = PullRequest.get_or_404(
310 310 self.request.matchdict['pull_request_id'])
311 311 pull_request_id = pull_request.pull_request_id
312 312
313 313 c.state_progressing = pull_request.is_state_changing()
314 314 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
315 315
316 316 _new_state = {
317 317 'created': PullRequest.STATE_CREATED,
318 318 }.get(self.request.GET.get('force_state'))
319 319
320 320 if c.is_super_admin and _new_state:
321 321 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
322 322 h.flash(
323 323 _('Pull Request state was force changed to `{}`').format(_new_state),
324 324 category='success')
325 325 Session().commit()
326 326
327 327 raise HTTPFound(h.route_path(
328 328 'pullrequest_show', repo_name=self.db_repo_name,
329 329 pull_request_id=pull_request_id))
330 330
331 331 version = self.request.GET.get('version')
332 332 from_version = self.request.GET.get('from_version') or version
333 333 merge_checks = self.request.GET.get('merge_checks')
334 334 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
335 335 force_refresh = str2bool(self.request.GET.get('force_refresh'))
336 336 c.range_diff_on = self.request.GET.get('range-diff') == "1"
337 337
338 338 # fetch global flags of ignore ws or context lines
339 339 diff_context = diffs.get_diff_context(self.request)
340 340 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
341 341
342 342 (pull_request_latest,
343 343 pull_request_at_ver,
344 344 pull_request_display_obj,
345 345 at_version) = PullRequestModel().get_pr_version(
346 346 pull_request_id, version=version)
347 347
348 348 pr_closed = pull_request_latest.is_closed()
349 349
350 350 if pr_closed and (version or from_version):
351 351 # not allow to browse versions for closed PR
352 352 raise HTTPFound(h.route_path(
353 353 'pullrequest_show', repo_name=self.db_repo_name,
354 354 pull_request_id=pull_request_id))
355 355
356 356 versions = pull_request_display_obj.versions()
357 357 # used to store per-commit range diffs
358 358 c.changes = collections.OrderedDict()
359 359
360 360 c.at_version = at_version
361 361 c.at_version_num = (at_version
362 362 if at_version and at_version != PullRequest.LATEST_VER
363 363 else None)
364 364
365 365 c.at_version_index = ChangesetComment.get_index_from_version(
366 366 c.at_version_num, versions)
367 367
368 368 (prev_pull_request_latest,
369 369 prev_pull_request_at_ver,
370 370 prev_pull_request_display_obj,
371 371 prev_at_version) = PullRequestModel().get_pr_version(
372 372 pull_request_id, version=from_version)
373 373
374 374 c.from_version = prev_at_version
375 375 c.from_version_num = (prev_at_version
376 376 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
377 377 else None)
378 378 c.from_version_index = ChangesetComment.get_index_from_version(
379 379 c.from_version_num, versions)
380 380
381 381 # define if we're in COMPARE mode or VIEW at version mode
382 382 compare = at_version != prev_at_version
383 383
384 384 # pull_requests repo_name we opened it against
385 385 # ie. target_repo must match
386 386 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
387 387 log.warning('Mismatch between the current repo: %s, and target %s',
388 388 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
389 389 raise HTTPNotFound()
390 390
391 391 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
392 392
393 393 c.pull_request = pull_request_display_obj
394 394 c.renderer = pull_request_at_ver.description_renderer or c.renderer
395 395 c.pull_request_latest = pull_request_latest
396 396
397 397 # inject latest version
398 398 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
399 399 c.versions = versions + [latest_ver]
400 400
401 401 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
402 402 c.allowed_to_change_status = False
403 403 c.allowed_to_update = False
404 404 c.allowed_to_merge = False
405 405 c.allowed_to_delete = False
406 406 c.allowed_to_comment = False
407 407 c.allowed_to_close = False
408 408 else:
409 409 can_change_status = PullRequestModel().check_user_change_status(
410 410 pull_request_at_ver, self._rhodecode_user)
411 411 c.allowed_to_change_status = can_change_status and not pr_closed
412 412
413 413 c.allowed_to_update = PullRequestModel().check_user_update(
414 414 pull_request_latest, self._rhodecode_user) and not pr_closed
415 415 c.allowed_to_merge = PullRequestModel().check_user_merge(
416 416 pull_request_latest, self._rhodecode_user) and not pr_closed
417 417 c.allowed_to_delete = PullRequestModel().check_user_delete(
418 418 pull_request_latest, self._rhodecode_user) and not pr_closed
419 419 c.allowed_to_comment = not pr_closed
420 420 c.allowed_to_close = c.allowed_to_merge and not pr_closed
421 421
422 422 c.forbid_adding_reviewers = False
423 423 c.forbid_author_to_review = False
424 424 c.forbid_commit_author_to_review = False
425 425
426 426 if pull_request_latest.reviewer_data and \
427 427 'rules' in pull_request_latest.reviewer_data:
428 428 rules = pull_request_latest.reviewer_data['rules'] or {}
429 429 try:
430 430 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
431 431 c.forbid_author_to_review = rules.get('forbid_author_to_review')
432 432 c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review')
433 433 except Exception:
434 434 pass
435 435
436 436 # check merge capabilities
437 437 _merge_check = MergeCheck.validate(
438 438 pull_request_latest, auth_user=self._rhodecode_user,
439 439 translator=self.request.translate,
440 440 force_shadow_repo_refresh=force_refresh)
441 441
442 442 c.pr_merge_errors = _merge_check.error_details
443 443 c.pr_merge_possible = not _merge_check.failed
444 444 c.pr_merge_message = _merge_check.merge_msg
445 445 c.pr_merge_source_commit = _merge_check.source_commit
446 446 c.pr_merge_target_commit = _merge_check.target_commit
447 447
448 448 c.pr_merge_info = MergeCheck.get_merge_conditions(
449 449 pull_request_latest, translator=self.request.translate)
450 450
451 451 c.pull_request_review_status = _merge_check.review_status
452 452 if merge_checks:
453 453 self.request.override_renderer = \
454 454 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
455 455 return self._get_template_context(c)
456 456
457 457 c.allowed_reviewers = [obj.user_id for obj in pull_request.reviewers if obj.user]
458 458 c.reviewers_count = pull_request.reviewers_count
459 459 c.observers_count = pull_request.observers_count
460 460
461 461 # reviewers and statuses
462 462 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
463 463 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
464 464 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
465 465
466 466 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
467 467 member_reviewer = h.reviewer_as_json(
468 468 member, reasons=reasons, mandatory=mandatory,
469 469 role=review_obj.role,
470 470 user_group=review_obj.rule_user_group_data()
471 471 )
472 472
473 473 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
474 474 member_reviewer['review_status'] = current_review_status
475 475 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
476 476 member_reviewer['allowed_to_update'] = c.allowed_to_update
477 477 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
478 478
479 479 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
480 480
481 481 for observer_obj, member in pull_request_at_ver.observers():
482 482 member_observer = h.reviewer_as_json(
483 483 member, reasons=[], mandatory=False,
484 484 role=observer_obj.role,
485 485 user_group=observer_obj.rule_user_group_data()
486 486 )
487 487 member_observer['allowed_to_update'] = c.allowed_to_update
488 488 c.pull_request_set_observers_data_json['observers'].append(member_observer)
489 489
490 490 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
491 491
492 492 general_comments, inline_comments = \
493 493 self.register_comments_vars(c, pull_request_latest, versions)
494 494
495 495 # TODOs
496 496 c.unresolved_comments = CommentsModel() \
497 497 .get_pull_request_unresolved_todos(pull_request_latest)
498 498 c.resolved_comments = CommentsModel() \
499 499 .get_pull_request_resolved_todos(pull_request_latest)
500 500
501 501 # if we use version, then do not show later comments
502 502 # than current version
503 503 display_inline_comments = collections.defaultdict(
504 504 lambda: collections.defaultdict(list))
505 505 for co in inline_comments:
506 506 if c.at_version_num:
507 507 # pick comments that are at least UPTO given version, so we
508 508 # don't render comments for higher version
509 509 should_render = co.pull_request_version_id and \
510 510 co.pull_request_version_id <= c.at_version_num
511 511 else:
512 512 # showing all, for 'latest'
513 513 should_render = True
514 514
515 515 if should_render:
516 516 display_inline_comments[co.f_path][co.line_no].append(co)
517 517
518 518 # load diff data into template context, if we use compare mode then
519 519 # diff is calculated based on changes between versions of PR
520 520
521 521 source_repo = pull_request_at_ver.source_repo
522 522 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
523 523
524 524 target_repo = pull_request_at_ver.target_repo
525 525 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
526 526
527 527 if compare:
528 528 # in compare switch the diff base to latest commit from prev version
529 529 target_ref_id = prev_pull_request_display_obj.revisions[0]
530 530
531 531 # despite opening commits for bookmarks/branches/tags, we always
532 532 # convert this to rev to prevent changes after bookmark or branch change
533 533 c.source_ref_type = 'rev'
534 534 c.source_ref = source_ref_id
535 535
536 536 c.target_ref_type = 'rev'
537 537 c.target_ref = target_ref_id
538 538
539 539 c.source_repo = source_repo
540 540 c.target_repo = target_repo
541 541
542 542 c.commit_ranges = []
543 543 source_commit = EmptyCommit()
544 544 target_commit = EmptyCommit()
545 545 c.missing_requirements = False
546 546
547 547 source_scm = source_repo.scm_instance()
548 548 target_scm = target_repo.scm_instance()
549 549
550 550 shadow_scm = None
551 551 try:
552 552 shadow_scm = pull_request_latest.get_shadow_repo()
553 553 except Exception:
554 554 log.debug('Failed to get shadow repo', exc_info=True)
555 555 # try first the existing source_repo, and then shadow
556 556 # repo if we can obtain one
557 557 commits_source_repo = source_scm
558 558 if shadow_scm:
559 559 commits_source_repo = shadow_scm
560 560
561 561 c.commits_source_repo = commits_source_repo
562 562 c.ancestor = None # set it to None, to hide it from PR view
563 563
564 564 # empty version means latest, so we keep this to prevent
565 565 # double caching
566 566 version_normalized = version or PullRequest.LATEST_VER
567 567 from_version_normalized = from_version or PullRequest.LATEST_VER
568 568
569 569 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
570 570 cache_file_path = diff_cache_exist(
571 571 cache_path, 'pull_request', pull_request_id, version_normalized,
572 572 from_version_normalized, source_ref_id, target_ref_id,
573 573 hide_whitespace_changes, diff_context, c.fulldiff)
574 574
575 575 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
576 576 force_recache = self.get_recache_flag()
577 577
578 578 cached_diff = None
579 579 if caching_enabled:
580 580 cached_diff = load_cached_diff(cache_file_path)
581 581
582 582 has_proper_commit_cache = (
583 583 cached_diff and cached_diff.get('commits')
584 584 and len(cached_diff.get('commits', [])) == 5
585 585 and cached_diff.get('commits')[0]
586 586 and cached_diff.get('commits')[3])
587 587
588 588 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
589 589 diff_commit_cache = \
590 590 (ancestor_commit, commit_cache, missing_requirements,
591 591 source_commit, target_commit) = cached_diff['commits']
592 592 else:
593 593 # NOTE(marcink): we reach potentially unreachable errors when a PR has
594 594 # merge errors resulting in potentially hidden commits in the shadow repo.
595 595 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
596 596 and _merge_check.merge_response
597 597 maybe_unreachable = maybe_unreachable \
598 598 and _merge_check.merge_response.metadata.get('unresolved_files')
599 599 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
600 600 diff_commit_cache = \
601 601 (ancestor_commit, commit_cache, missing_requirements,
602 602 source_commit, target_commit) = self.get_commits(
603 603 commits_source_repo,
604 604 pull_request_at_ver,
605 605 source_commit,
606 606 source_ref_id,
607 607 source_scm,
608 608 target_commit,
609 609 target_ref_id,
610 610 target_scm,
611 611 maybe_unreachable=maybe_unreachable)
612 612
613 613 # register our commit range
614 614 for comm in commit_cache.values():
615 615 c.commit_ranges.append(comm)
616 616
617 617 c.missing_requirements = missing_requirements
618 618 c.ancestor_commit = ancestor_commit
619 619 c.statuses = source_repo.statuses(
620 620 [x.raw_id for x in c.commit_ranges])
621 621
622 622 # auto collapse if we have more than limit
623 623 collapse_limit = diffs.DiffProcessor._collapse_commits_over
624 624 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
625 625 c.compare_mode = compare
626 626
627 627 # diff_limit is the old behavior, will cut off the whole diff
628 628 # if the limit is applied otherwise will just hide the
629 629 # big files from the front-end
630 630 diff_limit = c.visual.cut_off_limit_diff
631 631 file_limit = c.visual.cut_off_limit_file
632 632
633 633 c.missing_commits = False
634 634 if (c.missing_requirements
635 635 or isinstance(source_commit, EmptyCommit)
636 636 or source_commit == target_commit):
637 637
638 638 c.missing_commits = True
639 639 else:
640 640 c.inline_comments = display_inline_comments
641 641
642 642 use_ancestor = True
643 643 if from_version_normalized != version_normalized:
644 644 use_ancestor = False
645 645
646 646 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
647 647 if not force_recache and has_proper_diff_cache:
648 648 c.diffset = cached_diff['diff']
649 649 else:
650 650 try:
651 651 c.diffset = self._get_diffset(
652 652 c.source_repo.repo_name, commits_source_repo,
653 653 c.ancestor_commit,
654 654 source_ref_id, target_ref_id,
655 655 target_commit, source_commit,
656 656 diff_limit, file_limit, c.fulldiff,
657 657 hide_whitespace_changes, diff_context,
658 658 use_ancestor=use_ancestor
659 659 )
660 660
661 661 # save cached diff
662 662 if caching_enabled:
663 663 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
664 664 except CommitDoesNotExistError:
665 665 log.exception('Failed to generate diffset')
666 666 c.missing_commits = True
667 667
668 668 if not c.missing_commits:
669 669
670 670 c.limited_diff = c.diffset.limited_diff
671 671
672 672 # calculate removed files that are bound to comments
673 673 comment_deleted_files = [
674 674 fname for fname in display_inline_comments
675 675 if fname not in c.diffset.file_stats]
676 676
677 677 c.deleted_files_comments = collections.defaultdict(dict)
678 678 for fname, per_line_comments in display_inline_comments.items():
679 679 if fname in comment_deleted_files:
680 680 c.deleted_files_comments[fname]['stats'] = 0
681 681 c.deleted_files_comments[fname]['comments'] = list()
682 682 for lno, comments in per_line_comments.items():
683 683 c.deleted_files_comments[fname]['comments'].extend(comments)
684 684
685 685 # maybe calculate the range diff
686 686 if c.range_diff_on:
687 687 # TODO(marcink): set whitespace/context
688 688 context_lcl = 3
689 689 ign_whitespace_lcl = False
690 690
691 691 for commit in c.commit_ranges:
692 692 commit2 = commit
693 693 commit1 = commit.first_parent
694 694
695 695 range_diff_cache_file_path = diff_cache_exist(
696 696 cache_path, 'diff', commit.raw_id,
697 697 ign_whitespace_lcl, context_lcl, c.fulldiff)
698 698
699 699 cached_diff = None
700 700 if caching_enabled:
701 701 cached_diff = load_cached_diff(range_diff_cache_file_path)
702 702
703 703 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
704 704 if not force_recache and has_proper_diff_cache:
705 705 diffset = cached_diff['diff']
706 706 else:
707 707 diffset = self._get_range_diffset(
708 708 commits_source_repo, source_repo,
709 709 commit1, commit2, diff_limit, file_limit,
710 710 c.fulldiff, ign_whitespace_lcl, context_lcl
711 711 )
712 712
713 713 # save cached diff
714 714 if caching_enabled:
715 715 cache_diff(range_diff_cache_file_path, diffset, None)
716 716
717 717 c.changes[commit.raw_id] = diffset
718 718
719 719 # this is a hack to properly display links, when creating PR, the
720 720 # compare view and others uses different notation, and
721 721 # compare_commits.mako renders links based on the target_repo.
722 722 # We need to swap that here to generate it properly on the html side
723 723 c.target_repo = c.source_repo
724 724
725 725 c.commit_statuses = ChangesetStatus.STATUSES
726 726
727 727 c.show_version_changes = not pr_closed
728 728 if c.show_version_changes:
729 729 cur_obj = pull_request_at_ver
730 730 prev_obj = prev_pull_request_at_ver
731 731
732 732 old_commit_ids = prev_obj.revisions
733 733 new_commit_ids = cur_obj.revisions
734 734 commit_changes = PullRequestModel()._calculate_commit_id_changes(
735 735 old_commit_ids, new_commit_ids)
736 736 c.commit_changes_summary = commit_changes
737 737
738 738 # calculate the diff for commits between versions
739 739 c.commit_changes = []
740 740
741 741 def mark(cs, fw):
742 742 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
743 743
744 744 for c_type, raw_id in mark(commit_changes.added, 'a') \
745 745 + mark(commit_changes.removed, 'r') \
746 746 + mark(commit_changes.common, 'c'):
747 747
748 748 if raw_id in commit_cache:
749 749 commit = commit_cache[raw_id]
750 750 else:
751 751 try:
752 752 commit = commits_source_repo.get_commit(raw_id)
753 753 except CommitDoesNotExistError:
754 754 # in case we fail extracting still use "dummy" commit
755 755 # for display in commit diff
756 756 commit = h.AttributeDict(
757 757 {'raw_id': raw_id,
758 758 'message': 'EMPTY or MISSING COMMIT'})
759 759 c.commit_changes.append([c_type, commit])
760 760
761 761 # current user review statuses for each version
762 762 c.review_versions = {}
763 763 if self._rhodecode_user.user_id in c.allowed_reviewers:
764 764 for co in general_comments:
765 765 if co.author.user_id == self._rhodecode_user.user_id:
766 766 status = co.status_change
767 767 if status:
768 768 _ver_pr = status[0].comment.pull_request_version_id
769 769 c.review_versions[_ver_pr] = status[0]
770 770
771 771 return self._get_template_context(c)
772 772
    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
            maybe_unreachable=False):
        """
        Resolve the commit objects needed to render a pull request version.

        :param commits_source_repo: scm instance the PR revisions are read from
        :param pull_request_at_ver: pull request (at the requested version)
            whose ``revisions`` list is loaded
        :param source_commit: fallback source commit; replaced on successful lookup
        :param source_ref_id: raw commit id of the source ref
        :param source_scm: scm instance used to resolve the common ancestor
        :param target_commit: fallback target commit; replaced on successful lookup
        :param target_ref_id: raw commit id of the target ref
        :param target_scm: scm instance of the target repo (kept for signature
            compatibility with callers; not used in the body)
        :param maybe_unreachable: also look up commits that only exist in the
            shadow repo (e.g. after a failed merge attempt)
        :return: tuple of (ancestor_commit, commit_cache, missing_requirements,
            source_commit, target_commit)
        """
        # ordered dict so commits render in the PR's revision order
        commit_cache = collections.OrderedDict()
        missing_requirements = False

        try:
            # pre_load batches attribute fetching into fewer backend calls
            pre_load = ["author", "date", "message", "branch", "parents"]

            pull_request_commits = pull_request_at_ver.revisions
            log.debug('Loading %s commits from %s',
                      len(pull_request_commits), commits_source_repo)

            for rev in pull_request_commits:
                comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
                                                      maybe_unreachable=maybe_unreachable)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        except CommitDoesNotExistError:
            # keep the fallback commits passed in by the caller
            log.warning('Failed to get commit from `{}` repo'.format(
                commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            log.warning('Failed to get all required data from repo', exc_info=True)
            missing_requirements = True

        pr_ancestor_id = pull_request_at_ver.common_ancestor_id

        try:
            ancestor_commit = source_scm.get_commit(pr_ancestor_id)
        except Exception:
            # ancestor is optional for display; a missing/invalid id yields None
            ancestor_commit = None

        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
815 815
816 816 def assure_not_empty_repo(self):
817 817 _ = self.request.translate
818 818
819 819 try:
820 820 self.db_repo.scm_instance().get_commit()
821 821 except EmptyRepositoryError:
822 822 h.flash(h.literal(_('There are no commits yet')),
823 823 category='warning')
824 824 raise HTTPFound(
825 825 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
826 826
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_new', request_method='GET',
        renderer='rhodecode:templates/pullrequests/pullrequest.mako')
    def pull_request_new(self):
        """
        Render the "new pull request" form.

        Pre-selects the source ref from optional ``commit``/``branch``/
        ``bookmark`` GET parameters, defaults the target repo to the parent
        (fork origin) when it is readable and non-empty, and pre-computes a
        default PR title plus the JSON refs data the frontend selectors need.
        """
        _ = self.request.translate
        c = self.load_default_context()

        self.assure_not_empty_repo()
        source_repo = self.db_repo

        # optional pre-selection of the source ref
        commit_id = self.request.GET.get('commit')
        branch_ref = self.request.GET.get('branch')
        bookmark_ref = self.request.GET.get('bookmark')

        try:
            source_repo_data = PullRequestModel().generate_repo_data(
                source_repo, commit_id=commit_id,
                branch=branch_ref, bookmark=bookmark_ref,
                translator=self.request.translate)
        except CommitDoesNotExistError as e:
            log.exception(e)
            h.flash(_('Commit does not exist'), 'error')
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=source_repo.repo_name))

        default_target_repo = source_repo

        if source_repo.parent and c.has_origin_repo_read_perm:
            parent_vcs_obj = source_repo.parent.scm_instance()
            if parent_vcs_obj and not parent_vcs_obj.is_empty():
                # change default if we have a parent repo
                default_target_repo = source_repo.parent

        target_repo_data = PullRequestModel().generate_repo_data(
            default_target_repo, translator=self.request.translate)

        selected_source_ref = source_repo_data['refs']['selected_ref']
        title_source_ref = ''
        if selected_source_ref:
            # selected_ref format is 'type:name:commit_id'; use the name part
            title_source_ref = selected_source_ref.split(':', 2)[1]
        c.default_title = PullRequestModel().generate_pullrequest_title(
            source=source_repo.repo_name,
            source_ref=title_source_ref,
            target=default_target_repo.repo_name
        )

        # serialized refs data consumed by the frontend repo/ref selectors
        c.default_repo_data = {
            'source_repo_name': source_repo.repo_name,
            'source_refs_json': json.dumps(source_repo_data),
            'target_repo_name': default_target_repo.repo_name,
            'target_refs_json': json.dumps(target_repo_data),
        }
        c.default_source_ref = selected_source_ref

        return self._get_template_context(c)
886 886
887 887 @LoginRequired()
888 888 @NotAnonymous()
889 889 @HasRepoPermissionAnyDecorator(
890 890 'repository.read', 'repository.write', 'repository.admin')
891 891 @view_config(
892 892 route_name='pullrequest_repo_refs', request_method='GET',
893 893 renderer='json_ext', xhr=True)
894 894 def pull_request_repo_refs(self):
895 895 self.load_default_context()
896 896 target_repo_name = self.request.matchdict['target_repo_name']
897 897 repo = Repository.get_by_repo_name(target_repo_name)
898 898 if not repo:
899 899 raise HTTPNotFound()
900 900
901 901 target_perm = HasRepoPermissionAny(
902 902 'repository.read', 'repository.write', 'repository.admin')(
903 903 target_repo_name)
904 904 if not target_perm:
905 905 raise HTTPNotFound()
906 906
907 907 return PullRequestModel().generate_repo_data(
908 908 repo, translator=self.request.translate)
909 909
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_repo_targets', request_method='GET',
        renderer='json_ext', xhr=True)
    def pullrequest_repo_targets(self):
        """
        Return candidate target repositories for a new pull request as a
        select2-compatible JSON structure.

        Candidates are: forks of this repo's parent (siblings), the repo
        itself, and forks of this repo — optionally filtered by the ``query``
        GET parameter. Only repos the current user may read are returned.
        """
        _ = self.request.translate
        filter_query = self.request.GET.get('query')

        # get the parents
        parent_target_repos = []
        if self.db_repo.parent:
            # forks of our parent, i.e. sibling repositories; shortest names
            # first so the closest matches survive the limit cut-off
            parents_query = Repository.query() \
                .order_by(func.length(Repository.repo_name)) \
                .filter(Repository.fork_id == self.db_repo.parent.repo_id)

            if filter_query:
                ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
                parents_query = parents_query.filter(
                    Repository.repo_name.ilike(ilike_expression))
            parents = parents_query.limit(20).all()

            for parent in parents:
                parent_vcs_obj = parent.scm_instance()
                # skip broken or empty repos, they cannot serve as targets
                if parent_vcs_obj and not parent_vcs_obj.is_empty():
                    parent_target_repos.append(parent)

        # get other forks, and repo itself
        query = Repository.query() \
            .order_by(func.length(Repository.repo_name)) \
            .filter(
                or_(Repository.repo_id == self.db_repo.repo_id,  # repo itself
                    Repository.fork_id == self.db_repo.repo_id)  # forks of this repo
            ) \
            .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))

        if filter_query:
            ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
            query = query.filter(Repository.repo_name.ilike(ilike_expression))

        # top up to ~20 results total, but always show at least 5 from here
        limit = max(20 - len(parent_target_repos), 5)  # not less then 5
        target_repos = query.limit(limit).all()

        all_target_repos = target_repos + parent_target_repos

        repos = []
        # This checks permissions to the repositories
        for obj in ScmModel().get_repos(all_target_repos):
            repos.append({
                'id': obj['name'],
                'text': obj['name'],
                'type': 'repo',
                'repo_id': obj['dbrepo']['repo_id'],
                'repo_type': obj['dbrepo']['repo_type'],
                'private': obj['dbrepo']['private'],

            })

        data = {
            'more': False,
            'results': [{
                'text': _('Repositories'),
                'children': repos
            }] if repos else []
        }
        return data
978 978
979 979 def _get_existing_ids(self, post_data):
980 980 return filter(lambda e: e, map(safe_int, aslist(post_data.get('comments'), ',')))
981 981
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_comments', request_method='POST',
        renderer='string_html', xhr=True)
    def pullrequest_comments(self):
        """
        Render the sidebar comments table for a pull request (optionally at a
        given ``version``) as an HTML fragment for the AJAX sidebar refresh.

        The POST field ``comments`` carries comma-separated IDs already shown
        on the client so the template can mark existing entries.
        """
        self.load_default_context()

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id
        version = self.request.GET.get('version')

        _render = self.request.get_partial_renderer(
            'rhodecode:templates/base/sidebar.mako')
        c = _render.get_call_context()

        (pull_request_latest,
         pull_request_at_ver,
         pull_request_display_obj,
         at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=version)
        versions = pull_request_display_obj.versions()
        latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
        c.versions = versions + [latest_ver]

        c.at_version = at_version
        # normalize "latest" to None so templates treat it as the current state
        c.at_version_num = (at_version
                            if at_version and at_version != PullRequest.LATEST_VER
                            else None)

        self.register_comments_vars(c, pull_request_latest, versions)
        all_comments = c.inline_comments_flat + c.comments

        existing_ids = self._get_existing_ids(self.request.POST)
        return _render('comments_table', all_comments, len(all_comments),
                       existing_ids=existing_ids)
1021 1021
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_todos', request_method='POST',
        renderer='string_html', xhr=True)
    def pullrequest_todos(self):
        """
        Render the sidebar TODO-comments table (unresolved followed by
        resolved) for a pull request as an HTML fragment for the AJAX
        sidebar refresh.
        """
        self.load_default_context()

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id
        version = self.request.GET.get('version')

        _render = self.request.get_partial_renderer(
            'rhodecode:templates/base/sidebar.mako')
        c = _render.get_call_context()
        (pull_request_latest,
         pull_request_at_ver,
         pull_request_display_obj,
         at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=version)
        versions = pull_request_display_obj.versions()
        latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
        c.versions = versions + [latest_ver]

        c.at_version = at_version
        # normalize "latest" to None so templates treat it as the current state
        c.at_version_num = (at_version
                            if at_version and at_version != PullRequest.LATEST_VER
                            else None)

        c.unresolved_comments = CommentsModel() \
            .get_pull_request_unresolved_todos(pull_request)
        c.resolved_comments = CommentsModel() \
            .get_pull_request_resolved_todos(pull_request)

        all_comments = c.unresolved_comments + c.resolved_comments
        existing_ids = self._get_existing_ids(self.request.POST)
        # the count passed to the template is only the unresolved TODOs
        return _render('comments_table', all_comments, len(c.unresolved_comments),
                       todo_comments=True, existing_ids=existing_ids)
1063 1063
1064 1064 @LoginRequired()
1065 1065 @NotAnonymous()
1066 1066 @HasRepoPermissionAnyDecorator(
1067 1067 'repository.read', 'repository.write', 'repository.admin')
1068 1068 @CSRFRequired()
1069 1069 @view_config(
1070 1070 route_name='pullrequest_create', request_method='POST',
1071 1071 renderer=None)
1072 1072 def pull_request_create(self):
1073 1073 _ = self.request.translate
1074 1074 self.assure_not_empty_repo()
1075 1075 self.load_default_context()
1076 1076
1077 1077 controls = peppercorn.parse(self.request.POST.items())
1078 1078
1079 1079 try:
1080 1080 form = PullRequestForm(
1081 1081 self.request.translate, self.db_repo.repo_id)()
1082 1082 _form = form.to_python(controls)
1083 1083 except formencode.Invalid as errors:
1084 1084 if errors.error_dict.get('revisions'):
1085 1085 msg = 'Revisions: %s' % errors.error_dict['revisions']
1086 1086 elif errors.error_dict.get('pullrequest_title'):
1087 1087 msg = errors.error_dict.get('pullrequest_title')
1088 1088 else:
1089 1089 msg = _('Error creating pull request: {}').format(errors)
1090 1090 log.exception(msg)
1091 1091 h.flash(msg, 'error')
1092 1092
1093 1093 # would rather just go back to form ...
1094 1094 raise HTTPFound(
1095 1095 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1096 1096
1097 1097 source_repo = _form['source_repo']
1098 1098 source_ref = _form['source_ref']
1099 1099 target_repo = _form['target_repo']
1100 1100 target_ref = _form['target_ref']
1101 1101 commit_ids = _form['revisions'][::-1]
1102 1102 common_ancestor_id = _form['common_ancestor']
1103 1103
1104 1104 # find the ancestor for this pr
1105 1105 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1106 1106 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1107 1107
1108 1108 if not (source_db_repo or target_db_repo):
1109 1109 h.flash(_('source_repo or target repo not found'), category='error')
1110 1110 raise HTTPFound(
1111 1111 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1112 1112
1113 1113 # re-check permissions again here
1114 1114 # source_repo we must have read permissions
1115 1115
1116 1116 source_perm = HasRepoPermissionAny(
1117 1117 'repository.read', 'repository.write', 'repository.admin')(
1118 1118 source_db_repo.repo_name)
1119 1119 if not source_perm:
1120 1120 msg = _('Not Enough permissions to source repo `{}`.'.format(
1121 1121 source_db_repo.repo_name))
1122 1122 h.flash(msg, category='error')
1123 1123 # copy the args back to redirect
1124 1124 org_query = self.request.GET.mixed()
1125 1125 raise HTTPFound(
1126 1126 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1127 1127 _query=org_query))
1128 1128
1129 1129 # target repo we must have read permissions, and also later on
1130 1130 # we want to check branch permissions here
1131 1131 target_perm = HasRepoPermissionAny(
1132 1132 'repository.read', 'repository.write', 'repository.admin')(
1133 1133 target_db_repo.repo_name)
1134 1134 if not target_perm:
1135 1135 msg = _('Not Enough permissions to target repo `{}`.'.format(
1136 1136 target_db_repo.repo_name))
1137 1137 h.flash(msg, category='error')
1138 1138 # copy the args back to redirect
1139 1139 org_query = self.request.GET.mixed()
1140 1140 raise HTTPFound(
1141 1141 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1142 1142 _query=org_query))
1143 1143
1144 1144 source_scm = source_db_repo.scm_instance()
1145 1145 target_scm = target_db_repo.scm_instance()
1146 1146
1147 1147 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1148 1148 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1149 1149
1150 1150 ancestor = source_scm.get_common_ancestor(
1151 1151 source_commit.raw_id, target_commit.raw_id, target_scm)
1152 1152
1153 source_ref_type, source_ref_name, source_commit_id = _form['target_ref'].split(':')
1154 target_ref_type, target_ref_name, target_commit_id = _form['source_ref'].split(':')
1153 1155 # recalculate target ref based on ancestor
1154 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1155 1156 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1156 1157
1157 1158 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1158 1159 PullRequestModel().get_reviewer_functions()
1159 1160
1160 1161 # recalculate reviewers logic, to make sure we can validate this
1161 1162 reviewer_rules = get_default_reviewers_data(
1162 self._rhodecode_db_user, source_db_repo,
1163 source_commit, target_db_repo, target_commit)
1163 self._rhodecode_db_user,
1164 source_db_repo,
1165 Reference(source_ref_type, source_ref_name, source_commit_id),
1166 target_db_repo,
1167 Reference(target_ref_type, target_ref_name, target_commit_id),
1168 include_diff_info=False)
1164 1169
1165 1170 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1166 1171 observers = validate_observers(_form['observer_members'], reviewer_rules)
1167 1172
1168 1173 pullrequest_title = _form['pullrequest_title']
1169 1174 title_source_ref = source_ref.split(':', 2)[1]
1170 1175 if not pullrequest_title:
1171 1176 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1172 1177 source=source_repo,
1173 1178 source_ref=title_source_ref,
1174 1179 target=target_repo
1175 1180 )
1176 1181
1177 1182 description = _form['pullrequest_desc']
1178 1183 description_renderer = _form['description_renderer']
1179 1184
1180 1185 try:
1181 1186 pull_request = PullRequestModel().create(
1182 1187 created_by=self._rhodecode_user.user_id,
1183 1188 source_repo=source_repo,
1184 1189 source_ref=source_ref,
1185 1190 target_repo=target_repo,
1186 1191 target_ref=target_ref,
1187 1192 revisions=commit_ids,
1188 1193 common_ancestor_id=common_ancestor_id,
1189 1194 reviewers=reviewers,
1190 1195 observers=observers,
1191 1196 title=pullrequest_title,
1192 1197 description=description,
1193 1198 description_renderer=description_renderer,
1194 1199 reviewer_data=reviewer_rules,
1195 1200 auth_user=self._rhodecode_user
1196 1201 )
1197 1202 Session().commit()
1198 1203
1199 1204 h.flash(_('Successfully opened new pull request'),
1200 1205 category='success')
1201 1206 except Exception:
1202 1207 msg = _('Error occurred during creation of this pull request.')
1203 1208 log.exception(msg)
1204 1209 h.flash(msg, category='error')
1205 1210
1206 1211 # copy the args back to redirect
1207 1212 org_query = self.request.GET.mixed()
1208 1213 raise HTTPFound(
1209 1214 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1210 1215 _query=org_query))
1211 1216
1212 1217 raise HTTPFound(
1213 1218 h.route_path('pullrequest_show', repo_name=target_repo,
1214 1219 pull_request_id=pull_request.pull_request_id))
1215 1220
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_update', request_method='POST',
        renderer='json_ext')
    def pull_request_update(self):
        """
        Dispatch a pull-request update request to the right handler.

        Depending on POST contents this updates reviewers, observers,
        commits, or the title/description. Returns a JSON dict with
        ``response`` and an optional ``redirect_url``; raises 403 when the
        user may not update and 400 for an unrecognized payload.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        c = self.load_default_context()
        redirect_url = None

        # closed PRs are immutable
        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return {'response': True,
                    'redirect_url': redirect_url}

        is_state_changing = pull_request.is_state_changing()
        c.pr_broadcast_channel = channelstream.pr_channel(pull_request)

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)

        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())
            force_refresh = str2bool(self.request.POST.get('force_refresh'))

            if 'review_members' in controls:
                self._update_reviewers(
                    c,
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data,
                    PullRequestReviewers.ROLE_REVIEWER)
            elif 'observer_members' in controls:
                self._update_reviewers(
                    c,
                    pull_request, controls['observer_members'],
                    pull_request.reviewer_data,
                    PullRequestReviewers.ROLE_OBSERVER)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                # refuse commit updates while another state change is running
                if is_state_changing:
                    log.debug('commits update: forbidden because pull request is in state %s',
                              pull_request.pull_request_state)
                    msg = _(u'Cannot update pull requests commits in state other than `{}`. '
                            u'Current state is: `{}`').format(
                        PullRequest.STATE_CREATED, pull_request.pull_request_state)
                    h.flash(msg, category='error')
                    return {'response': True,
                            'redirect_url': redirect_url}

                self._update_commits(c, pull_request)
                if force_refresh:
                    redirect_url = h.route_path(
                        'pullrequest_show', repo_name=self.db_repo_name,
                        pull_request_id=pull_request.pull_request_id,
                        _query={"force_refresh": 1})
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                log.error('Unhandled update data.')
                raise HTTPBadRequest()

            return {'response': True,
                    'redirect_url': redirect_url}
        raise HTTPForbidden()
1288 1293
1289 1294 def _edit_pull_request(self, pull_request):
1290 1295 """
1291 1296 Edit title and description
1292 1297 """
1293 1298 _ = self.request.translate
1294 1299
1295 1300 try:
1296 1301 PullRequestModel().edit(
1297 1302 pull_request,
1298 1303 self.request.POST.get('title'),
1299 1304 self.request.POST.get('description'),
1300 1305 self.request.POST.get('description_renderer'),
1301 1306 self._rhodecode_user)
1302 1307 except ValueError:
1303 1308 msg = _(u'Cannot update closed pull requests.')
1304 1309 h.flash(msg, category='error')
1305 1310 return
1306 1311 else:
1307 1312 Session().commit()
1308 1313
1309 1314 msg = _(u'Pull request title & description updated.')
1310 1315 h.flash(msg, category='success')
1311 1316 return
1312 1317
    def _update_commits(self, c, pull_request):
        """
        Refresh a pull request's commit set from its source/target repos.

        Runs the model update while holding the UPDATING state, then flashes
        a summary of added/removed commits (or the failure reason) and pushes
        a channelstream notification on success.
        """
        _ = self.request.translate

        # hold the UPDATING state for the duration of the model update
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            resp = PullRequestModel().update_commits(
                pull_request, self._rhodecode_db_user)

        if resp.executed:

            # describe where the changes came from for the flash message
            if resp.target_changed and resp.source_changed:
                changed = 'target and source repositories'
            elif resp.target_changed and not resp.source_changed:
                changed = 'target repository'
            elif not resp.target_changed and resp.source_changed:
                changed = 'source repository'
            else:
                changed = 'nothing'

            msg = _(u'Pull request updated to "{source_commit_id}" with '
                    u'{count_added} added, {count_removed} removed commits. '
                    u'Source of changes: {change_source}.')
            msg = msg.format(
                source_commit_id=pull_request.source_ref_parts.commit_id,
                count_added=len(resp.changes.added),
                count_removed=len(resp.changes.removed),
                change_source=changed)
            h.flash(msg, category='success')
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
        else:
            msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
            # some failure reasons are expected no-ops, not errors
            warning_reasons = [
                UpdateFailureReason.NO_CHANGE,
                UpdateFailureReason.WRONG_REF_TYPE,
            ]
            category = 'warning' if resp.reason in warning_reasons else 'error'
            h.flash(msg, category=category)
1350 1355
    def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
        """
        Validate and persist a new reviewer or observer list for a PR.

        :param c: context object carrying ``pr_broadcast_channel``
        :param pull_request: the pull request being updated
        :param review_members: submitted members structure from the form
        :param reviewer_rules: current reviewer rules used for validation
        :param role: PullRequestReviewers.ROLE_REVIEWER or ROLE_OBSERVER
        """
        _ = self.request.translate

        get_default_reviewers_data, validate_default_reviewers, validate_observers = \
            PullRequestModel().get_reviewer_functions()

        if role == PullRequestReviewers.ROLE_REVIEWER:
            try:
                reviewers = validate_default_reviewers(review_members, reviewer_rules)
            except ValueError as e:
                log.error('Reviewers Validation: {}'.format(e))
                h.flash(e, category='error')
                return

            # capture the status before the change to detect a transition below
            old_calculated_status = pull_request.calculated_review_status()
            PullRequestModel().update_reviewers(
                pull_request, reviewers, self._rhodecode_user)

            Session().commit()

            msg = _('Pull request reviewers updated.')
            h.flash(msg, category='success')
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)

            # trigger status changed if change in reviewers changes the status
            calculated_status = pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'review_status_change',
                    data={'status': calculated_status})

        elif role == PullRequestReviewers.ROLE_OBSERVER:
            try:
                observers = validate_observers(review_members, reviewer_rules)
            except ValueError as e:
                log.error('Observers Validation: {}'.format(e))
                h.flash(e, category='error')
                return

            # observers do not influence review status — no hook is fired here
            PullRequestModel().update_observers(
                pull_request, observers, self._rhodecode_user)

            Session().commit()
            msg = _('Pull request observers updated.')
            h.flash(msg, category='success')
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1399 1404
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_merge', request_method='POST',
        renderer='json_ext')
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        # refuse while another state-changing operation is in flight
        if pull_request.is_state_changing():
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                     pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        # run the merge pre-condition checks while holding the UPDATING state
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
        merge_possible = not check.failed

        # surface every individual check failure to the user
        for err_type, error_msg in check.errors:
            h.flash(error_msg, category=err_type)

        if merge_possible:
            log.debug("Pre-conditions checked, trying to merge.")
            extras = vcs_operation_context(
                self.request.environ, repo_name=pull_request.target_repo.repo_name,
                username=self._rhodecode_db_user.username, action='push',
                scm=pull_request.target_repo.repo_type)
            # hold the UPDATING state again for the actual merge operation
            with pull_request.set_state(PullRequest.STATE_UPDATING):
                self._merge_pull_request(
                    pull_request, self._rhodecode_db_user, extras)
        else:
            log.debug("Pre-conditions failed, NOT merging.")

        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))
1458 1463
1459 1464 def _merge_pull_request(self, pull_request, user, extras):
1460 1465 _ = self.request.translate
1461 1466 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1462 1467
1463 1468 if merge_resp.executed:
1464 1469 log.debug("The merge was successful, closing the pull request.")
1465 1470 PullRequestModel().close_pull_request(
1466 1471 pull_request.pull_request_id, user)
1467 1472 Session().commit()
1468 1473 msg = _('Pull request was successfully merged and closed.')
1469 1474 h.flash(msg, category='success')
1470 1475 else:
1471 1476 log.debug(
1472 1477 "The merge was not successful. Merge response: %s", merge_resp)
1473 1478 msg = merge_resp.merge_status_message
1474 1479 h.flash(msg, category='error')
1475 1480
1476 1481 @LoginRequired()
1477 1482 @NotAnonymous()
1478 1483 @HasRepoPermissionAnyDecorator(
1479 1484 'repository.read', 'repository.write', 'repository.admin')
1480 1485 @CSRFRequired()
1481 1486 @view_config(
1482 1487 route_name='pullrequest_delete', request_method='POST',
1483 1488 renderer='json_ext')
1484 1489 def pull_request_delete(self):
1485 1490 _ = self.request.translate
1486 1491
1487 1492 pull_request = PullRequest.get_or_404(
1488 1493 self.request.matchdict['pull_request_id'])
1489 1494 self.load_default_context()
1490 1495
1491 1496 pr_closed = pull_request.is_closed()
1492 1497 allowed_to_delete = PullRequestModel().check_user_delete(
1493 1498 pull_request, self._rhodecode_user) and not pr_closed
1494 1499
1495 1500 # only owner can delete it !
1496 1501 if allowed_to_delete:
1497 1502 PullRequestModel().delete(pull_request, self._rhodecode_user)
1498 1503 Session().commit()
1499 1504 h.flash(_('Successfully deleted pull request'),
1500 1505 category='success')
1501 1506 raise HTTPFound(h.route_path('pullrequest_show_all',
1502 1507 repo_name=self.db_repo_name))
1503 1508
1504 1509 log.warning('user %s tried to delete pull request without access',
1505 1510 self._rhodecode_user)
1506 1511 raise HTTPNotFound()
1507 1512
1508 1513 @LoginRequired()
1509 1514 @NotAnonymous()
1510 1515 @HasRepoPermissionAnyDecorator(
1511 1516 'repository.read', 'repository.write', 'repository.admin')
1512 1517 @CSRFRequired()
1513 1518 @view_config(
1514 1519 route_name='pullrequest_comment_create', request_method='POST',
1515 1520 renderer='json_ext')
1516 1521 def pull_request_comment_create(self):
1517 1522 _ = self.request.translate
1518 1523
1519 1524 pull_request = PullRequest.get_or_404(
1520 1525 self.request.matchdict['pull_request_id'])
1521 1526 pull_request_id = pull_request.pull_request_id
1522 1527
1523 1528 if pull_request.is_closed():
1524 1529 log.debug('comment: forbidden because pull request is closed')
1525 1530 raise HTTPForbidden()
1526 1531
1527 1532 allowed_to_comment = PullRequestModel().check_user_comment(
1528 1533 pull_request, self._rhodecode_user)
1529 1534 if not allowed_to_comment:
1530 1535 log.debug('comment: forbidden because pull request is from forbidden repo')
1531 1536 raise HTTPForbidden()
1532 1537
1533 1538 c = self.load_default_context()
1534 1539
1535 1540 status = self.request.POST.get('changeset_status', None)
1536 1541 text = self.request.POST.get('text')
1537 1542 comment_type = self.request.POST.get('comment_type')
1538 1543 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1539 1544 close_pull_request = self.request.POST.get('close_pull_request')
1540 1545
1541 1546 # the logic here should work like following, if we submit close
1542 1547 # pr comment, use `close_pull_request_with_comment` function
1543 1548 # else handle regular comment logic
1544 1549
1545 1550 if close_pull_request:
1546 1551 # only owner or admin or person with write permissions
1547 1552 allowed_to_close = PullRequestModel().check_user_update(
1548 1553 pull_request, self._rhodecode_user)
1549 1554 if not allowed_to_close:
1550 1555 log.debug('comment: forbidden because not allowed to close '
1551 1556 'pull request %s', pull_request_id)
1552 1557 raise HTTPForbidden()
1553 1558
1554 1559 # This also triggers `review_status_change`
1555 1560 comment, status = PullRequestModel().close_pull_request_with_comment(
1556 1561 pull_request, self._rhodecode_user, self.db_repo, message=text,
1557 1562 auth_user=self._rhodecode_user)
1558 1563 Session().flush()
1559 1564
1560 1565 PullRequestModel().trigger_pull_request_hook(
1561 1566 pull_request, self._rhodecode_user, 'comment',
1562 1567 data={'comment': comment})
1563 1568
1564 1569 else:
1565 1570 # regular comment case, could be inline, or one with status.
1566 1571 # for that one we check also permissions
1567 1572
1568 1573 allowed_to_change_status = PullRequestModel().check_user_change_status(
1569 1574 pull_request, self._rhodecode_user)
1570 1575
1571 1576 if status and allowed_to_change_status:
1572 1577 message = (_('Status change %(transition_icon)s %(status)s')
1573 1578 % {'transition_icon': '>',
1574 1579 'status': ChangesetStatus.get_status_lbl(status)})
1575 1580 text = text or message
1576 1581
1577 1582 comment = CommentsModel().create(
1578 1583 text=text,
1579 1584 repo=self.db_repo.repo_id,
1580 1585 user=self._rhodecode_user.user_id,
1581 1586 pull_request=pull_request,
1582 1587 f_path=self.request.POST.get('f_path'),
1583 1588 line_no=self.request.POST.get('line'),
1584 1589 status_change=(ChangesetStatus.get_status_lbl(status)
1585 1590 if status and allowed_to_change_status else None),
1586 1591 status_change_type=(status
1587 1592 if status and allowed_to_change_status else None),
1588 1593 comment_type=comment_type,
1589 1594 resolves_comment_id=resolves_comment_id,
1590 1595 auth_user=self._rhodecode_user
1591 1596 )
1592 1597 is_inline = bool(comment.f_path and comment.line_no)
1593 1598
1594 1599 if allowed_to_change_status:
1595 1600 # calculate old status before we change it
1596 1601 old_calculated_status = pull_request.calculated_review_status()
1597 1602
1598 1603 # get status if set !
1599 1604 if status:
1600 1605 ChangesetStatusModel().set_status(
1601 1606 self.db_repo.repo_id,
1602 1607 status,
1603 1608 self._rhodecode_user.user_id,
1604 1609 comment,
1605 1610 pull_request=pull_request
1606 1611 )
1607 1612
1608 1613 Session().flush()
1609 1614 # this is somehow required to get access to some relationship
1610 1615 # loaded on comment
1611 1616 Session().refresh(comment)
1612 1617
1613 1618 PullRequestModel().trigger_pull_request_hook(
1614 1619 pull_request, self._rhodecode_user, 'comment',
1615 1620 data={'comment': comment})
1616 1621
1617 1622 # we now calculate the status of pull request, and based on that
1618 1623 # calculation we set the commits status
1619 1624 calculated_status = pull_request.calculated_review_status()
1620 1625 if old_calculated_status != calculated_status:
1621 1626 PullRequestModel().trigger_pull_request_hook(
1622 1627 pull_request, self._rhodecode_user, 'review_status_change',
1623 1628 data={'status': calculated_status})
1624 1629
1625 1630 Session().commit()
1626 1631
1627 1632 data = {
1628 1633 'target_id': h.safeid(h.safe_unicode(
1629 1634 self.request.POST.get('f_path'))),
1630 1635 }
1631 1636 if comment:
1632 1637 c.co = comment
1633 1638 c.at_version_num = None
1634 1639 rendered_comment = render(
1635 1640 'rhodecode:templates/changeset/changeset_comment_block.mako',
1636 1641 self._get_template_context(c), self.request)
1637 1642
1638 1643 data.update(comment.get_dict())
1639 1644 data.update({'rendered_text': rendered_comment})
1640 1645
1641 1646 comment_broadcast_channel = channelstream.comment_channel(
1642 1647 self.db_repo_name, pull_request_obj=pull_request)
1643 1648
1644 1649 comment_data = data
1645 1650 comment_type = 'inline' if is_inline else 'general'
1646 1651 channelstream.comment_channelstream_push(
1647 1652 self.request, comment_broadcast_channel, self._rhodecode_user,
1648 1653 _('posted a new {} comment').format(comment_type),
1649 1654 comment_data=comment_data)
1650 1655
1651 1656 return data
1652 1657
1653 1658 @LoginRequired()
1654 1659 @NotAnonymous()
1655 1660 @HasRepoPermissionAnyDecorator(
1656 1661 'repository.read', 'repository.write', 'repository.admin')
1657 1662 @CSRFRequired()
1658 1663 @view_config(
1659 1664 route_name='pullrequest_comment_delete', request_method='POST',
1660 1665 renderer='json_ext')
1661 1666 def pull_request_comment_delete(self):
1662 1667 pull_request = PullRequest.get_or_404(
1663 1668 self.request.matchdict['pull_request_id'])
1664 1669
1665 1670 comment = ChangesetComment.get_or_404(
1666 1671 self.request.matchdict['comment_id'])
1667 1672 comment_id = comment.comment_id
1668 1673
1669 1674 if comment.immutable:
1670 1675 # don't allow deleting comments that are immutable
1671 1676 raise HTTPForbidden()
1672 1677
1673 1678 if pull_request.is_closed():
1674 1679 log.debug('comment: forbidden because pull request is closed')
1675 1680 raise HTTPForbidden()
1676 1681
1677 1682 if not comment:
1678 1683 log.debug('Comment with id:%s not found, skipping', comment_id)
1679 1684 # comment already deleted in another call probably
1680 1685 return True
1681 1686
1682 1687 if comment.pull_request.is_closed():
1683 1688 # don't allow deleting comments on closed pull request
1684 1689 raise HTTPForbidden()
1685 1690
1686 1691 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1687 1692 super_admin = h.HasPermissionAny('hg.admin')()
1688 1693 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1689 1694 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1690 1695 comment_repo_admin = is_repo_admin and is_repo_comment
1691 1696
1692 1697 if super_admin or comment_owner or comment_repo_admin:
1693 1698 old_calculated_status = comment.pull_request.calculated_review_status()
1694 1699 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1695 1700 Session().commit()
1696 1701 calculated_status = comment.pull_request.calculated_review_status()
1697 1702 if old_calculated_status != calculated_status:
1698 1703 PullRequestModel().trigger_pull_request_hook(
1699 1704 comment.pull_request, self._rhodecode_user, 'review_status_change',
1700 1705 data={'status': calculated_status})
1701 1706 return True
1702 1707 else:
1703 1708 log.warning('No permissions for user %s to delete comment_id: %s',
1704 1709 self._rhodecode_db_user, comment_id)
1705 1710 raise HTTPNotFound()
1706 1711
1707 1712 @LoginRequired()
1708 1713 @NotAnonymous()
1709 1714 @HasRepoPermissionAnyDecorator(
1710 1715 'repository.read', 'repository.write', 'repository.admin')
1711 1716 @CSRFRequired()
1712 1717 @view_config(
1713 1718 route_name='pullrequest_comment_edit', request_method='POST',
1714 1719 renderer='json_ext')
1715 1720 def pull_request_comment_edit(self):
1716 1721 self.load_default_context()
1717 1722
1718 1723 pull_request = PullRequest.get_or_404(
1719 1724 self.request.matchdict['pull_request_id']
1720 1725 )
1721 1726 comment = ChangesetComment.get_or_404(
1722 1727 self.request.matchdict['comment_id']
1723 1728 )
1724 1729 comment_id = comment.comment_id
1725 1730
1726 1731 if comment.immutable:
1727 1732 # don't allow deleting comments that are immutable
1728 1733 raise HTTPForbidden()
1729 1734
1730 1735 if pull_request.is_closed():
1731 1736 log.debug('comment: forbidden because pull request is closed')
1732 1737 raise HTTPForbidden()
1733 1738
1734 1739 if not comment:
1735 1740 log.debug('Comment with id:%s not found, skipping', comment_id)
1736 1741 # comment already deleted in another call probably
1737 1742 return True
1738 1743
1739 1744 if comment.pull_request.is_closed():
1740 1745 # don't allow deleting comments on closed pull request
1741 1746 raise HTTPForbidden()
1742 1747
1743 1748 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1744 1749 super_admin = h.HasPermissionAny('hg.admin')()
1745 1750 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1746 1751 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1747 1752 comment_repo_admin = is_repo_admin and is_repo_comment
1748 1753
1749 1754 if super_admin or comment_owner or comment_repo_admin:
1750 1755 text = self.request.POST.get('text')
1751 1756 version = self.request.POST.get('version')
1752 1757 if text == comment.text:
1753 1758 log.warning(
1754 1759 'Comment(PR): '
1755 1760 'Trying to create new version '
1756 1761 'with the same comment body {}'.format(
1757 1762 comment_id,
1758 1763 )
1759 1764 )
1760 1765 raise HTTPNotFound()
1761 1766
1762 1767 if version.isdigit():
1763 1768 version = int(version)
1764 1769 else:
1765 1770 log.warning(
1766 1771 'Comment(PR): Wrong version type {} {} '
1767 1772 'for comment {}'.format(
1768 1773 version,
1769 1774 type(version),
1770 1775 comment_id,
1771 1776 )
1772 1777 )
1773 1778 raise HTTPNotFound()
1774 1779
1775 1780 try:
1776 1781 comment_history = CommentsModel().edit(
1777 1782 comment_id=comment_id,
1778 1783 text=text,
1779 1784 auth_user=self._rhodecode_user,
1780 1785 version=version,
1781 1786 )
1782 1787 except CommentVersionMismatch:
1783 1788 raise HTTPConflict()
1784 1789
1785 1790 if not comment_history:
1786 1791 raise HTTPNotFound()
1787 1792
1788 1793 Session().commit()
1789 1794
1790 1795 PullRequestModel().trigger_pull_request_hook(
1791 1796 pull_request, self._rhodecode_user, 'comment_edit',
1792 1797 data={'comment': comment})
1793 1798
1794 1799 return {
1795 1800 'comment_history_id': comment_history.comment_history_id,
1796 1801 'comment_id': comment.comment_id,
1797 1802 'comment_version': comment_history.version,
1798 1803 'comment_author_username': comment_history.author.username,
1799 1804 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1800 1805 'comment_created_on': h.age_component(comment_history.created_on,
1801 1806 time_is_local=True),
1802 1807 }
1803 1808 else:
1804 1809 log.warning('No permissions for user %s to edit comment_id: %s',
1805 1810 self._rhodecode_db_user, comment_id)
1806 1811 raise HTTPNotFound()
@@ -1,73 +1,80 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 from pyramid.view import view_config
24 24
25 25 from rhodecode.apps._base import RepoAppView
26 26 from rhodecode.apps.repository.utils import get_default_reviewers_data
27 27 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
28 from rhodecode.lib.vcs.backends.base import Reference
28 29 from rhodecode.model.db import Repository
29 30
30 31 log = logging.getLogger(__name__)
31 32
32 33
33 34 class RepoReviewRulesView(RepoAppView):
34 35 def load_default_context(self):
35 36 c = self._get_local_tmpl_context()
36 37 return c
37 38
38 39 @LoginRequired()
39 40 @HasRepoPermissionAnyDecorator('repository.admin')
40 41 @view_config(
41 42 route_name='repo_reviewers', request_method='GET',
42 43 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
43 44 def repo_review_rules(self):
44 45 c = self.load_default_context()
45 46 c.active = 'reviewers'
46 47
47 48 return self._get_template_context(c)
48 49
49 50 @LoginRequired()
50 51 @HasRepoPermissionAnyDecorator(
51 52 'repository.read', 'repository.write', 'repository.admin')
52 53 @view_config(
53 54 route_name='repo_default_reviewers_data', request_method='GET',
54 55 renderer='json_ext')
55 56 def repo_default_reviewers_data(self):
56 57 self.load_default_context()
57 58
58 59 request = self.request
59 60 source_repo = self.db_repo
60 61 source_repo_name = source_repo.repo_name
61 62 target_repo_name = request.GET.get('target_repo', source_repo_name)
62 63 target_repo = Repository.get_by_repo_name(target_repo_name)
63 64
64 source_ref = request.GET['source_ref']
65 target_ref = request.GET['target_ref']
66 source_commit = source_repo.get_commit(source_ref)
67 target_commit = target_repo.get_commit(target_ref)
65 current_user = request.user.get_instance()
66
67 source_commit_id = request.GET['source_ref']
68 source_type = request.GET['source_ref_type']
69 source_name = request.GET['source_ref_name']
68 70
69 current_user = request.user.get_instance()
71 target_commit_id = request.GET['target_ref']
72 target_type = request.GET['target_ref_type']
73 target_name = request.GET['target_ref_name']
74
75 source_ref = Reference(source_type, source_name, source_commit_id)
76 target_ref = Reference(target_type, target_name, target_commit_id)
77
70 78 review_data = get_default_reviewers_data(
71 current_user, source_repo, source_commit, target_repo, target_commit)
72
79 current_user, source_repo, source_ref, target_repo, target_ref)
73 80 return review_data
@@ -1,1912 +1,1925 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
62
63 class Reference(_Reference):
64
65 @property
66 def branch(self):
67 if self.type == 'branch':
68 return self.name
69
70 @property
71 def bookmark(self):
72 if self.type == 'book':
73 return self.name
61 74
62 75
63 76 class MergeFailureReason(object):
64 77 """
65 78 Enumeration with all the reasons why the server side merge could fail.
66 79
67 80 DO NOT change the number of the reasons, as they may be stored in the
68 81 database.
69 82
70 83 Changing the name of a reason is acceptable and encouraged to deprecate old
71 84 reasons.
72 85 """
73 86
74 87 # Everything went well.
75 88 NONE = 0
76 89
77 90 # An unexpected exception was raised. Check the logs for more details.
78 91 UNKNOWN = 1
79 92
80 93 # The merge was not successful, there are conflicts.
81 94 MERGE_FAILED = 2
82 95
83 96 # The merge succeeded but we could not push it to the target repository.
84 97 PUSH_FAILED = 3
85 98
86 99 # The specified target is not a head in the target repository.
87 100 TARGET_IS_NOT_HEAD = 4
88 101
89 102 # The source repository contains more branches than the target. Pushing
90 103 # the merge will create additional branches in the target.
91 104 HG_SOURCE_HAS_MORE_BRANCHES = 5
92 105
93 106 # The target reference has multiple heads. That does not allow to correctly
94 107 # identify the target location. This could only happen for mercurial
95 108 # branches.
96 109 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97 110
98 111 # The target repository is locked
99 112 TARGET_IS_LOCKED = 7
100 113
101 114 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 115 # A involved commit could not be found.
103 116 _DEPRECATED_MISSING_COMMIT = 8
104 117
105 118 # The target repo reference is missing.
106 119 MISSING_TARGET_REF = 9
107 120
108 121 # The source repo reference is missing.
109 122 MISSING_SOURCE_REF = 10
110 123
111 124 # The merge was not successful, there are conflicts related to sub
112 125 # repositories.
113 126 SUBREPO_MERGE_FAILED = 11
114 127
115 128
116 129 class UpdateFailureReason(object):
117 130 """
118 131 Enumeration with all the reasons why the pull request update could fail.
119 132
120 133 DO NOT change the number of the reasons, as they may be stored in the
121 134 database.
122 135
123 136 Changing the name of a reason is acceptable and encouraged to deprecate old
124 137 reasons.
125 138 """
126 139
127 140 # Everything went well.
128 141 NONE = 0
129 142
130 143 # An unexpected exception was raised. Check the logs for more details.
131 144 UNKNOWN = 1
132 145
133 146 # The pull request is up to date.
134 147 NO_CHANGE = 2
135 148
136 149 # The pull request has a reference type that is not supported for update.
137 150 WRONG_REF_TYPE = 3
138 151
139 152 # Update failed because the target reference is missing.
140 153 MISSING_TARGET_REF = 4
141 154
142 155 # Update failed because the source reference is missing.
143 156 MISSING_SOURCE_REF = 5
144 157
145 158
146 159 class MergeResponse(object):
147 160
148 161 # uses .format(**metadata) for variables
149 162 MERGE_STATUS_MESSAGES = {
150 163 MergeFailureReason.NONE: lazy_ugettext(
151 164 u'This pull request can be automatically merged.'),
152 165 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 166 u'This pull request cannot be merged because of an unhandled exception. '
154 167 u'{exception}'),
155 168 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 169 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 170 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 171 u'This pull request could not be merged because push to '
159 172 u'target:`{target}@{merge_commit}` failed.'),
160 173 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 174 u'This pull request cannot be merged because the target '
162 175 u'`{target_ref.name}` is not a head.'),
163 176 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 177 u'This pull request cannot be merged because the source contains '
165 178 u'more branches than the target.'),
166 179 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 180 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 181 u'has multiple heads: `{heads}`.'),
169 182 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 183 u'This pull request cannot be merged because the target repository is '
171 184 u'locked by {locked_by}.'),
172 185
173 186 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 187 u'This pull request cannot be merged because the target '
175 188 u'reference `{target_ref.name}` is missing.'),
176 189 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 190 u'This pull request cannot be merged because the source '
178 191 u'reference `{source_ref.name}` is missing.'),
179 192 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 193 u'This pull request cannot be merged because of conflicts related '
181 194 u'to sub repositories.'),
182 195
183 196 # Deprecations
184 197 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 198 u'This pull request cannot be merged because the target or the '
186 199 u'source reference is missing.'),
187 200
188 201 }
189 202
190 203 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 204 self.possible = possible
192 205 self.executed = executed
193 206 self.merge_ref = merge_ref
194 207 self.failure_reason = failure_reason
195 208 self.metadata = metadata or {}
196 209
197 210 def __repr__(self):
198 211 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199 212
200 213 def __eq__(self, other):
201 214 same_instance = isinstance(other, self.__class__)
202 215 return same_instance \
203 216 and self.possible == other.possible \
204 217 and self.executed == other.executed \
205 218 and self.failure_reason == other.failure_reason
206 219
207 220 @property
208 221 def label(self):
209 222 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 223 not k.startswith('_'))
211 224 return label_dict.get(self.failure_reason)
212 225
213 226 @property
214 227 def merge_status_message(self):
215 228 """
216 229 Return a human friendly error message for the given merge status code.
217 230 """
218 231 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 232
220 233 try:
221 234 return msg.format(**self.metadata)
222 235 except Exception:
223 236 log.exception('Failed to format %s message', self)
224 237 return msg
225 238
226 239 def asdict(self):
227 240 data = {}
228 241 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
229 242 'merge_status_message']:
230 243 data[k] = getattr(self, k)
231 244 return data
232 245
233 246
234 247 class TargetRefMissing(ValueError):
235 248 pass
236 249
237 250
238 251 class SourceRefMissing(ValueError):
239 252 pass
240 253
241 254
242 255 class BaseRepository(object):
243 256 """
244 257 Base Repository for final backends
245 258
246 259 .. attribute:: DEFAULT_BRANCH_NAME
247 260
248 261 name of default branch (i.e. "trunk" for svn, "master" for git etc.
249 262
250 263 .. attribute:: commit_ids
251 264
252 265 list of all available commit ids, in ascending order
253 266
254 267 .. attribute:: path
255 268
256 269 absolute path to the repository
257 270
258 271 .. attribute:: bookmarks
259 272
260 273 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
261 274 there are no bookmarks or the backend implementation does not support
262 275 bookmarks.
263 276
264 277 .. attribute:: tags
265 278
266 279 Mapping from name to :term:`Commit ID` of the tag.
267 280
268 281 """
269 282
270 283 DEFAULT_BRANCH_NAME = None
271 284 DEFAULT_CONTACT = u"Unknown"
272 285 DEFAULT_DESCRIPTION = u"unknown"
273 286 EMPTY_COMMIT_ID = '0' * 40
274 287
275 288 path = None
276 289
277 290 _is_empty = None
278 291 _commit_ids = {}
279 292
280 293 def __init__(self, repo_path, config=None, create=False, **kwargs):
281 294 """
282 295 Initializes repository. Raises RepositoryError if repository could
283 296 not be find at the given ``repo_path`` or directory at ``repo_path``
284 297 exists and ``create`` is set to True.
285 298
286 299 :param repo_path: local path of the repository
287 300 :param config: repository configuration
288 301 :param create=False: if set to True, would try to create repository.
289 302 :param src_url=None: if set, should be proper url from which repository
290 303 would be cloned; requires ``create`` parameter to be set to True -
291 304 raises RepositoryError if src_url is set and create evaluates to
292 305 False
293 306 """
294 307 raise NotImplementedError
295 308
296 309 def __repr__(self):
297 310 return '<%s at %s>' % (self.__class__.__name__, self.path)
298 311
299 312 def __len__(self):
300 313 return self.count()
301 314
302 315 def __eq__(self, other):
303 316 same_instance = isinstance(other, self.__class__)
304 317 return same_instance and other.path == self.path
305 318
306 319 def __ne__(self, other):
307 320 return not self.__eq__(other)
308 321
309 322 def get_create_shadow_cache_pr_path(self, db_repo):
310 323 path = db_repo.cached_diffs_dir
311 324 if not os.path.exists(path):
312 325 os.makedirs(path, 0o755)
313 326 return path
314 327
315 328 @classmethod
316 329 def get_default_config(cls, default=None):
317 330 config = Config()
318 331 if default and isinstance(default, list):
319 332 for section, key, val in default:
320 333 config.set(section, key, val)
321 334 return config
322 335
323 336 @LazyProperty
324 337 def _remote(self):
325 338 raise NotImplementedError
326 339
327 340 def _heads(self, branch=None):
328 341 return []
329 342
330 343 @LazyProperty
331 344 def EMPTY_COMMIT(self):
332 345 return EmptyCommit(self.EMPTY_COMMIT_ID)
333 346
334 347 @LazyProperty
335 348 def alias(self):
336 349 for k, v in settings.BACKENDS.items():
337 350 if v.split('.')[-1] == str(self.__class__.__name__):
338 351 return k
339 352
340 353 @LazyProperty
341 354 def name(self):
342 355 return safe_unicode(os.path.basename(self.path))
343 356
344 357 @LazyProperty
345 358 def description(self):
346 359 raise NotImplementedError
347 360
348 361 def refs(self):
349 362 """
350 363 returns a `dict` with branches, bookmarks, tags, and closed_branches
351 364 for this repository
352 365 """
353 366 return dict(
354 367 branches=self.branches,
355 368 branches_closed=self.branches_closed,
356 369 tags=self.tags,
357 370 bookmarks=self.bookmarks
358 371 )
359 372
360 373 @LazyProperty
361 374 def branches(self):
362 375 """
363 376 A `dict` which maps branch names to commit ids.
364 377 """
365 378 raise NotImplementedError
366 379
367 380 @LazyProperty
368 381 def branches_closed(self):
369 382 """
370 383 A `dict` which maps tags names to commit ids.
371 384 """
372 385 raise NotImplementedError
373 386
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
380 393
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
387 400
388 401 @LazyProperty
389 402 def size(self):
390 403 """
391 404 Returns combined size in bytes for all repository files
392 405 """
393 406 tip = self.get_commit()
394 407 return tip.size
395 408
396 409 def size_at_commit(self, commit_id):
397 410 commit = self.get_commit(commit_id)
398 411 return commit.size
399 412
400 413 def _check_for_empty(self):
401 414 no_commits = len(self._commit_ids) == 0
402 415 if no_commits:
403 416 # check on remote to be sure
404 417 return self._remote.is_empty()
405 418 else:
406 419 return False
407 420
    def is_empty(self):
        """
        Return True if this repository has no commits.

        Under tests the check always runs fresh so fixtures see state
        changes immediately; in production it is cached per-instance.
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
417 430
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
425 438
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
432 445
433 446 # ==========================================================================
434 447 # COMMITS
435 448 # ==========================================================================
436 449
    @CachedProperty
    def commit_ids(self):
        """Full list of commit ids, oldest first; backends must implement."""
        raise NotImplementedError
440 453
    def append_commit_id(self, commit_id):
        """
        Register `commit_id` as a new commit and refresh cached state.

        The cache invalidation deliberately runs even when the id was
        already known, so the cached property is recomputed either way.
        """
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
448 461
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
460 473
461 474 def __iter__(self):
462 475 for commit_id in self.commit_ids:
463 476 yield self.get_commit(commit_id=commit_id)
464 477
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: optional lower bound for the commit date range
        :param end_date: optional upper bound for the commit date range
        :param branch_name: optional branch to restrict the commits to
        :param show_hidden: whether to include hidden/obsolete commits
            (backend-specific — TODO confirm per backend)
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tags: backend-specific tag translation flag

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
483 496
484 497 def __getitem__(self, key):
485 498 """
486 499 Allows index based access to the commit objects of this repository.
487 500 """
488 501 pre_load = ["author", "branch", "date", "message", "parents"]
489 502 if isinstance(key, slice):
490 503 return self._get_range(key, pre_load)
491 504 return self.get_commit(commit_idx=key, pre_load=pre_load)
492 505
493 506 def _get_range(self, slice_obj, pre_load):
494 507 for commit_id in self.commit_ids.__getitem__(slice_obj):
495 508 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
496 509
    def count(self):
        """Total number of commits in this repository."""
        return len(self.commit_ids)
499 512
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
513 526
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
526 539
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.

        NOTE(review): the former ``file_path`` param had no matching argument
        in this signature; use ``path``/``path1`` instead.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
552 565
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
558 571
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
570 583
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
588 601
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # dry runs get placeholder identity/message so callers may omit them
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # a real merge produces a commit — author and message are mandatory
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never propagate backend failures — report them in the response
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
641 654
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge; see :meth:`merge` for parameters."""
        raise NotImplementedError
648 661
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
657 670
658 671 @classmethod
659 672 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
660 673 """
661 674 Legacy version that was used before. We still need it for
662 675 backward compat
663 676 """
664 677 return os.path.join(
665 678 os.path.dirname(repo_path),
666 679 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
667 680
668 681 @classmethod
669 682 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
670 683 # The name of the shadow repository must start with '.', so it is
671 684 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
672 685 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
673 686 if os.path.exists(legacy_repository_path):
674 687 return legacy_repository_path
675 688 else:
676 689 return os.path.join(
677 690 os.path.dirname(repo_path),
678 691 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
679 692
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        # timestamped rename target so repeated cleanups never collide
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best-effort fallback: log, then retry ignoring errors so the
            # "MUST not fail" contract above holds
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
707 720
708 721 # ========== #
709 722 # COMMIT API #
710 723 # ========== #
711 724
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
718 731
719 732 # ======================== #
720 733 # UTILITIES FOR SUBCLASSES #
721 734 # ======================== #
722 735
723 736 def _validate_diff_commits(self, commit1, commit2):
724 737 """
725 738 Validates that the given commits are related to this repository.
726 739
727 740 Intended as a utility for sub classes to have a consistent validation
728 741 of input parameters in methods like :meth:`get_diff`.
729 742 """
730 743 self._validate_commit(commit1)
731 744 self._validate_commit(commit2)
732 745 if (isinstance(commit1, EmptyCommit) and
733 746 isinstance(commit2, EmptyCommit)):
734 747 raise ValueError("Cannot compare two empty commits")
735 748
    def _validate_commit(self, commit):
        """
        Ensure `commit` is a `BaseCommit` belonging to this repository;
        `EmptyCommit` instances are accepted regardless of their repository.
        """
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))
745 758
    def _validate_commit_id(self, commit_id):
        # commit ids must be strings (str/unicode under Python 2)
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
749 762
    def _validate_commit_idx(self, commit_idx):
        # Python 2: accept both int and long indices
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")
753 766
754 767 def _validate_branch_name(self, branch_name):
755 768 if branch_name and branch_name not in self.branches_all:
756 769 msg = ("Branch %s not found in %s" % (branch_name, self))
757 770 raise BranchDoesNotExistError(msg)
758 771
759 772 #
760 773 # Supporting deprecated API parts
761 774 # TODO: johbo: consider to move this into a mixin
762 775 #
763 776
    @property
    def EMPTY_CHANGESET(self):
        """Deprecated alias for ``EMPTY_COMMIT_ID``."""
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID
769 782
    @property
    def revisions(self):
        """Deprecated alias for :attr:`commit_ids`."""
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids
774 787
    @revisions.setter
    def revisions(self, value):
        """Deprecated setter; assigns to :attr:`commit_ids`."""
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value
779 792
    def get_changeset(self, revision=None, pre_load=None):
        """
        Deprecated: use :meth:`get_commit` instead.

        `revision` may be a commit id (string) or a commit index (number).
        """
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
790 803
791 804 def get_changesets(
792 805 self, start=None, end=None, start_date=None, end_date=None,
793 806 branch_name=None, pre_load=None):
794 807 warnings.warn("Use get_commits instead", DeprecationWarning)
795 808 start_id = self._revision_to_commit(start)
796 809 end_id = self._revision_to_commit(end)
797 810 return self.get_commits(
798 811 start_id=start_id, end_id=end_id, start_date=start_date,
799 812 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
800 813
    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, compat.string_types):
            # already a commit id
            commit_id = revision
        else:
            # numeric index — resolve through the commit_ids list
            commit_id = self.commit_ids[revision]
        return commit_id
816 829
    @property
    def in_memory_changeset(self):
        """Deprecated alias for :attr:`in_memory_commit`."""
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit
821 834
    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        Default: no path-level permission checking (backends may override).

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None
830 843
    def install_hooks(self, force=False):
        """Install repository hooks via the remote; returns its result."""
        return self._remote.install_hooks(force)
833 846
    def get_hooks_info(self):
        """Return hook information as reported by the remote."""
        return self._remote.get_hooks_info()
836 849
837 850
838 851 class BaseCommit(object):
839 852 """
840 853 Each backend should implement it's commit representation.
841 854
842 855 **Attributes**
843 856
844 857 ``repository``
845 858 repository object within which commit exists
846 859
847 860 ``id``
848 861 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
849 862 just ``tip``.
850 863
851 864 ``raw_id``
852 865 raw commit representation (i.e. full 40 length sha for git
853 866 backend)
854 867
855 868 ``short_id``
856 869 shortened (if apply) version of ``raw_id``; it would be simple
857 870 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
858 871 as ``raw_id`` for subversion
859 872
860 873 ``idx``
861 874 commit index
862 875
863 876 ``files``
864 877 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
865 878
866 879 ``dirs``
867 880 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
868 881
869 882 ``nodes``
870 883 combined list of ``Node`` objects
871 884
872 885 ``author``
873 886 author of the commit, as unicode
874 887
875 888 ``message``
876 889 message of the commit, as unicode
877 890
878 891 ``parents``
879 892 list of parent commits
880 893
881 894 """
882 895
883 896 branch = None
884 897 """
885 898 Depending on the backend this should be set to the branch name of the
886 899 commit. Backends not supporting branches on commits should leave this
887 900 value as ``None``.
888 901 """
889 902
890 903 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
891 904 """
892 905 This template is used to generate a default prefix for repository archives
893 906 if no prefix has been specified.
894 907 """
895 908
896 909 def __str__(self):
897 910 return '<%s at %s:%s>' % (
898 911 self.__class__.__name__, self.idx, self.short_id)
899 912
    def __repr__(self):
        # repr intentionally mirrors __str__ for log/debugger output
        return self.__str__()
902 915
    def __unicode__(self):
        # Python 2 unicode form: 'idx:short_id'
        return u'%s:%s' % (self.idx, self.short_id)
905 918
906 919 def __eq__(self, other):
907 920 same_instance = isinstance(other, self.__class__)
908 921 return same_instance and self.raw_id == other.raw_id
909 922
    def __json__(self):
        """JSON-serializable dict representation of this commit."""
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }
929 942
930 943 def __getstate__(self):
931 944 d = self.__dict__.copy()
932 945 d.pop('_remote', None)
933 946 d.pop('repository', None)
934 947 return d
935 948
    def serialize(self):
        """Alias for :meth:`__json__`."""
        return self.__json__()
938 951
939 952 def _get_refs(self):
940 953 return {
941 954 'branches': [self.branch] if self.branch else [],
942 955 'bookmarks': getattr(self, 'bookmarks', []),
943 956 'tags': self.tags
944 957 }
945 958
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        # last commit == last entry of the repository's commit_ids list
        return self.raw_id == self.repository.commit_ids[-1]
956 969
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
963 976
    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an `EmptyCommit` when this
        commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()
970 983
    @property
    def merge(self):
        """
        Returns boolean if commit is a merge (i.e. has more than one parent).
        """
        return len(self.parents) > 1
977 990
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
984 997
    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
991 1004
    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
998 1011
    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1006 1019
    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1013 1026
    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1020 1033
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name, extracted from the committer string.
        """

        return author_name(self.committer)
1028 1041
    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address, extracted from the committer string.
        """

        return author_email(self.committer)
1036 1049
    @LazyProperty
    def author(self):
        """
        Returns author for this commit

        Abstract: concrete backends must implement.
        """

        raise NotImplementedError
1044 1057
    @LazyProperty
    def author_name(self):
        """
        Returns author name, extracted from the author string.
        """

        return author_name(self.author)
1052 1065
    @LazyProperty
    def author_email(self):
        """
        Returns author email address, extracted from the author string.
        """

        return author_email(self.author)
1060 1073
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1066 1079
    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1072 1085
    def is_node_binary(self, path):
        """
        Returns ``True`` if given path is a binary file

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1078 1091
    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1084 1097
    def get_file_content_streamed(self, path):
        """
        Returns a streaming response from vcsserver with file content.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1090 1103
    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1096 1109
1097 1110 def get_path_commit(self, path, pre_load=None):
1098 1111 """
1099 1112 Returns last commit of the file at the given `path`.
1100 1113
1101 1114 :param pre_load: Optional. List of commit attributes to load.
1102 1115 """
1103 1116 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1104 1117 if not commits:
1105 1118 raise RepositoryError(
1106 1119 'Failed to fetch history for path {}. '
1107 1120 'Please check if such path exists in your repository'.format(
1108 1121 path))
1109 1122 return commits[0]
1110 1123
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1122 1135
    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1131 1144
    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1141 1154
    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1150 1163
    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node

        Default: no largefile support (backends override where applicable).
        """
        return None
1157 1170
1158 1171 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1159 1172 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1160 1173 """
1161 1174 Creates an archive containing the contents of the repository.
1162 1175
1163 1176 :param archive_dest_path: path to the file which to create the archive.
1164 1177 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1165 1178 :param prefix: name of root directory in archive.
1166 1179 Default is repository name and commit's short_id joined with dash:
1167 1180 ``"{repo_name}-{short_id}"``.
1168 1181 :param write_metadata: write a metadata file into archive.
1169 1182 :param mtime: custom modification time for archive creation, defaults
1170 1183 to time.time() if not given.
1171 1184 :param archive_at_path: pack files at this path (default '/')
1172 1185
1173 1186 :raise VCSError: If prefix has a problem.
1174 1187 """
1175 1188 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1176 1189 if kind not in allowed_kinds:
1177 1190 raise ImproperArchiveTypeError(
1178 1191 'Archive kind (%s) not supported use one of %s' %
1179 1192 (kind, allowed_kinds))
1180 1193
1181 1194 prefix = self._validate_archive_prefix(prefix)
1182 1195
1183 1196 mtime = mtime is not None or time.mktime(self.date.timetuple())
1184 1197
1185 1198 file_info = []
1186 1199 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1187 1200 for _r, _d, files in cur_rev.walk(archive_at_path):
1188 1201 for f in files:
1189 1202 f_path = os.path.join(prefix, f.path)
1190 1203 file_info.append(
1191 1204 (f_path, f.mode, f.is_link(), f.raw_bytes))
1192 1205
1193 1206 if write_metadata:
1194 1207 metadata = [
1195 1208 ('repo_name', self.repository.name),
1196 1209 ('commit_id', self.raw_id),
1197 1210 ('mtime', mtime),
1198 1211 ('branch', self.branch),
1199 1212 ('tags', ','.join(self.tags)),
1200 1213 ]
1201 1214 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1202 1215 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1203 1216
1204 1217 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1205 1218
    def _validate_archive_prefix(self, prefix):
        """
        Return a validated archive prefix; when `prefix` is None a default
        of the form 'repo_name-short_id' is generated.

        :raises ValueError: if prefix is not a str (Python 2 bytes)
        :raises VCSError: if prefix starts with '/' or is empty/whitespace
        """
        if prefix is None:
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            # NOTE: under Python 2, str is the bytes type — hence the message
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix
1218 1231
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        return self.get_node('')
1225 1238
    def next(self, branch=None):
        """
        Returns the next commit after this one; if `branch` is given, the
        next commit belonging to that branch.

        :param branch: show commits within the given named branch
        :raises CommitDoesNotExistError: if no matching commit follows
        """
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)
1235 1248
    def prev(self, branch=None):
        """
        Returns the previous commit before this one; if `branch` is given,
        the previous commit belonging to that branch.

        :param branch: show commit within the given named branch
        :raises CommitDoesNotExistError: if no matching commit precedes
        """
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)
1245 1258
1246 1259 def _find_next(self, indexes, branch=None):
1247 1260 if branch and self.branch != branch:
1248 1261 raise VCSError('Branch option used on commit not belonging '
1249 1262 'to that branch')
1250 1263
1251 1264 for next_idx in indexes:
1252 1265 commit = self.repository.get_commit(commit_idx=next_idx)
1253 1266 if branch and branch != commit.branch:
1254 1267 continue
1255 1268 return commit
1256 1269 raise CommitDoesNotExistError
1257 1270
1258 1271 def diff(self, ignore_whitespace=True, context=3):
1259 1272 """
1260 1273 Returns a `Diff` object representing the change made by this commit.
1261 1274 """
1262 1275 parent = self.first_parent
1263 1276 diff = self.repository.get_diff(
1264 1277 parent, self,
1265 1278 ignore_whitespace=ignore_whitespace,
1266 1279 context=context)
1267 1280 return diff
1268 1281
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.

        Abstract: concrete backends must implement.
        """
        raise NotImplementedError
1275 1288
1276 1289 @LazyProperty
1277 1290 def changed(self):
1278 1291 """
1279 1292 Returns list of modified ``FileNode`` objects.
1280 1293 """
1281 1294 raise NotImplementedError
1282 1295
1283 1296 @LazyProperty
1284 1297 def removed(self):
1285 1298 """
1286 1299 Returns list of removed ``FileNode`` objects.
1287 1300 """
1288 1301 raise NotImplementedError
1289 1302
1290 1303 @LazyProperty
1291 1304 def size(self):
1292 1305 """
1293 1306 Returns total number of bytes from contents of all filenodes.
1294 1307 """
1295 1308 return sum((node.size for node in self.get_filenodes_generator()))
1296 1309
1297 1310 def walk(self, topurl=''):
1298 1311 """
1299 1312 Similar to os.walk method. Instead of a filesystem it walks through
1300 1313 commit starting at given ``topurl``. Returns generator of tuples
1301 1314 (topnode, dirnodes, filenodes).
1302 1315 """
1303 1316 topnode = self.get_node(topurl)
1304 1317 if not topnode.is_dir():
1305 1318 return
1306 1319 yield (topnode, topnode.dirs, topnode.files)
1307 1320 for dirnode in topnode.dirs:
1308 1321 for tup in self.walk(dirnode.path):
1309 1322 yield tup
1310 1323
1311 1324 def get_filenodes_generator(self):
1312 1325 """
1313 1326 Returns generator that yields *all* file nodes.
1314 1327 """
1315 1328 for topnode, dirs, files in self.walk():
1316 1329 for node in files:
1317 1330 yield node
1318 1331
1319 1332 #
1320 1333 # Utilities for sub classes to support consistent behavior
1321 1334 #
1322 1335
1323 1336 def no_node_at_path(self, path):
1324 1337 return NodeDoesNotExistError(
1325 1338 u"There is no file nor directory at the given path: "
1326 1339 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1327 1340
1328 1341 def _fix_path(self, path):
1329 1342 """
1330 1343 Paths are stored without trailing slash so we need to get rid off it if
1331 1344 needed.
1332 1345 """
1333 1346 return path.rstrip('/')
1334 1347
1335 1348 #
1336 1349 # Deprecated API based on changesets
1337 1350 #
1338 1351
1339 1352 @property
1340 1353 def revision(self):
1341 1354 warnings.warn("Use idx instead", DeprecationWarning)
1342 1355 return self.idx
1343 1356
1344 1357 @revision.setter
1345 1358 def revision(self, value):
1346 1359 warnings.warn("Use idx instead", DeprecationWarning)
1347 1360 self.idx = value
1348 1361
1349 1362 def get_file_changeset(self, path):
1350 1363 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1351 1364 return self.get_path_commit(path)
1352 1365
1353 1366
1354 1367 class BaseChangesetClass(type):
1355 1368
1356 1369 def __instancecheck__(self, instance):
1357 1370 return isinstance(instance, BaseCommit)
1358 1371
1359 1372
1360 1373 class BaseChangeset(BaseCommit):
1361 1374
1362 1375 __metaclass__ = BaseChangesetClass
1363 1376
1364 1377 def __new__(cls, *args, **kwargs):
1365 1378 warnings.warn(
1366 1379 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1367 1380 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1368 1381
1369 1382
1370 1383 class BaseInMemoryCommit(object):
1371 1384 """
1372 1385 Represents differences between repository's state (most recent head) and
1373 1386 changes made *in place*.
1374 1387
1375 1388 **Attributes**
1376 1389
1377 1390 ``repository``
1378 1391 repository object for this in-memory-commit
1379 1392
1380 1393 ``added``
1381 1394 list of ``FileNode`` objects marked as *added*
1382 1395
1383 1396 ``changed``
1384 1397 list of ``FileNode`` objects marked as *changed*
1385 1398
1386 1399 ``removed``
1387 1400 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1388 1401 *removed*
1389 1402
1390 1403 ``parents``
1391 1404 list of :class:`BaseCommit` instances representing parents of
1392 1405 in-memory commit. Should always be 2-element sequence.
1393 1406
1394 1407 """
1395 1408
1396 1409 def __init__(self, repository):
1397 1410 self.repository = repository
1398 1411 self.added = []
1399 1412 self.changed = []
1400 1413 self.removed = []
1401 1414 self.parents = []
1402 1415
1403 1416 def add(self, *filenodes):
1404 1417 """
1405 1418 Marks given ``FileNode`` objects as *to be committed*.
1406 1419
1407 1420 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1408 1421 latest commit
1409 1422 :raises ``NodeAlreadyAddedError``: if node with same path is already
1410 1423 marked as *added*
1411 1424 """
1412 1425 # Check if not already marked as *added* first
1413 1426 for node in filenodes:
1414 1427 if node.path in (n.path for n in self.added):
1415 1428 raise NodeAlreadyAddedError(
1416 1429 "Such FileNode %s is already marked for addition"
1417 1430 % node.path)
1418 1431 for node in filenodes:
1419 1432 self.added.append(node)
1420 1433
1421 1434 def change(self, *filenodes):
1422 1435 """
1423 1436 Marks given ``FileNode`` objects to be *changed* in next commit.
1424 1437
1425 1438 :raises ``EmptyRepositoryError``: if there are no commits yet
1426 1439 :raises ``NodeAlreadyExistsError``: if node with same path is already
1427 1440 marked to be *changed*
1428 1441 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1429 1442 marked to be *removed*
1430 1443 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1431 1444 commit
1432 1445 :raises ``NodeNotChangedError``: if node hasn't really be changed
1433 1446 """
1434 1447 for node in filenodes:
1435 1448 if node.path in (n.path for n in self.removed):
1436 1449 raise NodeAlreadyRemovedError(
1437 1450 "Node at %s is already marked as removed" % node.path)
1438 1451 try:
1439 1452 self.repository.get_commit()
1440 1453 except EmptyRepositoryError:
1441 1454 raise EmptyRepositoryError(
1442 1455 "Nothing to change - try to *add* new nodes rather than "
1443 1456 "changing them")
1444 1457 for node in filenodes:
1445 1458 if node.path in (n.path for n in self.changed):
1446 1459 raise NodeAlreadyChangedError(
1447 1460 "Node at '%s' is already marked as changed" % node.path)
1448 1461 self.changed.append(node)
1449 1462
1450 1463 def remove(self, *filenodes):
1451 1464 """
1452 1465 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1453 1466 *removed* in next commit.
1454 1467
1455 1468 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1456 1469 be *removed*
1457 1470 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1458 1471 be *changed*
1459 1472 """
1460 1473 for node in filenodes:
1461 1474 if node.path in (n.path for n in self.removed):
1462 1475 raise NodeAlreadyRemovedError(
1463 1476 "Node is already marked to for removal at %s" % node.path)
1464 1477 if node.path in (n.path for n in self.changed):
1465 1478 raise NodeAlreadyChangedError(
1466 1479 "Node is already marked to be changed at %s" % node.path)
1467 1480 # We only mark node as *removed* - real removal is done by
1468 1481 # commit method
1469 1482 self.removed.append(node)
1470 1483
1471 1484 def reset(self):
1472 1485 """
1473 1486 Resets this instance to initial state (cleans ``added``, ``changed``
1474 1487 and ``removed`` lists).
1475 1488 """
1476 1489 self.added = []
1477 1490 self.changed = []
1478 1491 self.removed = []
1479 1492 self.parents = []
1480 1493
1481 1494 def get_ipaths(self):
1482 1495 """
1483 1496 Returns generator of paths from nodes marked as added, changed or
1484 1497 removed.
1485 1498 """
1486 1499 for node in itertools.chain(self.added, self.changed, self.removed):
1487 1500 yield node.path
1488 1501
1489 1502 def get_paths(self):
1490 1503 """
1491 1504 Returns list of paths from nodes marked as added, changed or removed.
1492 1505 """
1493 1506 return list(self.get_ipaths())
1494 1507
1495 1508 def check_integrity(self, parents=None):
1496 1509 """
1497 1510 Checks in-memory commit's integrity. Also, sets parents if not
1498 1511 already set.
1499 1512
1500 1513 :raises CommitError: if any error occurs (i.e.
1501 1514 ``NodeDoesNotExistError``).
1502 1515 """
1503 1516 if not self.parents:
1504 1517 parents = parents or []
1505 1518 if len(parents) == 0:
1506 1519 try:
1507 1520 parents = [self.repository.get_commit(), None]
1508 1521 except EmptyRepositoryError:
1509 1522 parents = [None, None]
1510 1523 elif len(parents) == 1:
1511 1524 parents += [None]
1512 1525 self.parents = parents
1513 1526
1514 1527 # Local parents, only if not None
1515 1528 parents = [p for p in self.parents if p]
1516 1529
1517 1530 # Check nodes marked as added
1518 1531 for p in parents:
1519 1532 for node in self.added:
1520 1533 try:
1521 1534 p.get_node(node.path)
1522 1535 except NodeDoesNotExistError:
1523 1536 pass
1524 1537 else:
1525 1538 raise NodeAlreadyExistsError(
1526 1539 "Node `%s` already exists at %s" % (node.path, p))
1527 1540
1528 1541 # Check nodes marked as changed
1529 1542 missing = set(self.changed)
1530 1543 not_changed = set(self.changed)
1531 1544 if self.changed and not parents:
1532 1545 raise NodeDoesNotExistError(str(self.changed[0].path))
1533 1546 for p in parents:
1534 1547 for node in self.changed:
1535 1548 try:
1536 1549 old = p.get_node(node.path)
1537 1550 missing.remove(node)
1538 1551 # if content actually changed, remove node from not_changed
1539 1552 if old.content != node.content:
1540 1553 not_changed.remove(node)
1541 1554 except NodeDoesNotExistError:
1542 1555 pass
1543 1556 if self.changed and missing:
1544 1557 raise NodeDoesNotExistError(
1545 1558 "Node `%s` marked as modified but missing in parents: %s"
1546 1559 % (node.path, parents))
1547 1560
1548 1561 if self.changed and not_changed:
1549 1562 raise NodeNotChangedError(
1550 1563 "Node `%s` wasn't actually changed (parents: %s)"
1551 1564 % (not_changed.pop().path, parents))
1552 1565
1553 1566 # Check nodes marked as removed
1554 1567 if self.removed and not parents:
1555 1568 raise NodeDoesNotExistError(
1556 1569 "Cannot remove node at %s as there "
1557 1570 "were no parents specified" % self.removed[0].path)
1558 1571 really_removed = set()
1559 1572 for p in parents:
1560 1573 for node in self.removed:
1561 1574 try:
1562 1575 p.get_node(node.path)
1563 1576 really_removed.add(node)
1564 1577 except CommitError:
1565 1578 pass
1566 1579 not_removed = set(self.removed) - really_removed
1567 1580 if not_removed:
1568 1581 # TODO: johbo: This code branch does not seem to be covered
1569 1582 raise NodeDoesNotExistError(
1570 1583 "Cannot remove node at %s from "
1571 1584 "following parents: %s" % (not_removed, parents))
1572 1585
1573 1586 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1574 1587 """
1575 1588 Performs in-memory commit (doesn't check workdir in any way) and
1576 1589 returns newly created :class:`BaseCommit`. Updates repository's
1577 1590 attribute `commits`.
1578 1591
1579 1592 .. note::
1580 1593
1581 1594 While overriding this method each backend's should call
1582 1595 ``self.check_integrity(parents)`` in the first place.
1583 1596
1584 1597 :param message: message of the commit
1585 1598 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1586 1599 :param parents: single parent or sequence of parents from which commit
1587 1600 would be derived
1588 1601 :param date: ``datetime.datetime`` instance. Defaults to
1589 1602 ``datetime.datetime.now()``.
1590 1603 :param branch: branch name, as string. If none given, default backend's
1591 1604 branch would be used.
1592 1605
1593 1606 :raises ``CommitError``: if any error occurs while committing
1594 1607 """
1595 1608 raise NotImplementedError
1596 1609
1597 1610
1598 1611 class BaseInMemoryChangesetClass(type):
1599 1612
1600 1613 def __instancecheck__(self, instance):
1601 1614 return isinstance(instance, BaseInMemoryCommit)
1602 1615
1603 1616
1604 1617 class BaseInMemoryChangeset(BaseInMemoryCommit):
1605 1618
1606 1619 __metaclass__ = BaseInMemoryChangesetClass
1607 1620
1608 1621 def __new__(cls, *args, **kwargs):
1609 1622 warnings.warn(
1610 1623 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1611 1624 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1612 1625
1613 1626
1614 1627 class EmptyCommit(BaseCommit):
1615 1628 """
1616 1629 A dummy empty commit. It's possible to pass a hash when creating
1617 1630 an EmptyCommit
1618 1631 """
1619 1632
1620 1633 def __init__(
1621 1634 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1622 1635 message='', author='', date=None):
1623 1636 self._empty_commit_id = commit_id
1624 1637 # TODO: johbo: Solve idx parameter, default value does not make
1625 1638 # too much sense
1626 1639 self.idx = idx
1627 1640 self.message = message
1628 1641 self.author = author
1629 1642 self.date = date or datetime.datetime.fromtimestamp(0)
1630 1643 self.repository = repo
1631 1644 self.alias = alias
1632 1645
1633 1646 @LazyProperty
1634 1647 def raw_id(self):
1635 1648 """
1636 1649 Returns raw string identifying this commit, useful for web
1637 1650 representation.
1638 1651 """
1639 1652
1640 1653 return self._empty_commit_id
1641 1654
1642 1655 @LazyProperty
1643 1656 def branch(self):
1644 1657 if self.alias:
1645 1658 from rhodecode.lib.vcs.backends import get_backend
1646 1659 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1647 1660
1648 1661 @LazyProperty
1649 1662 def short_id(self):
1650 1663 return self.raw_id[:12]
1651 1664
1652 1665 @LazyProperty
1653 1666 def id(self):
1654 1667 return self.raw_id
1655 1668
1656 1669 def get_path_commit(self, path):
1657 1670 return self
1658 1671
1659 1672 def get_file_content(self, path):
1660 1673 return u''
1661 1674
1662 1675 def get_file_content_streamed(self, path):
1663 1676 yield self.get_file_content()
1664 1677
1665 1678 def get_file_size(self, path):
1666 1679 return 0
1667 1680
1668 1681
1669 1682 class EmptyChangesetClass(type):
1670 1683
1671 1684 def __instancecheck__(self, instance):
1672 1685 return isinstance(instance, EmptyCommit)
1673 1686
1674 1687
1675 1688 class EmptyChangeset(EmptyCommit):
1676 1689
1677 1690 __metaclass__ = EmptyChangesetClass
1678 1691
1679 1692 def __new__(cls, *args, **kwargs):
1680 1693 warnings.warn(
1681 1694 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1682 1695 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1683 1696
1684 1697 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1685 1698 alias=None, revision=-1, message='', author='', date=None):
1686 1699 if requested_revision is not None:
1687 1700 warnings.warn(
1688 1701 "Parameter requested_revision not supported anymore",
1689 1702 DeprecationWarning)
1690 1703 super(EmptyChangeset, self).__init__(
1691 1704 commit_id=cs, repo=repo, alias=alias, idx=revision,
1692 1705 message=message, author=author, date=date)
1693 1706
1694 1707 @property
1695 1708 def revision(self):
1696 1709 warnings.warn("Use idx instead", DeprecationWarning)
1697 1710 return self.idx
1698 1711
1699 1712 @revision.setter
1700 1713 def revision(self, value):
1701 1714 warnings.warn("Use idx instead", DeprecationWarning)
1702 1715 self.idx = value
1703 1716
1704 1717
1705 1718 class EmptyRepository(BaseRepository):
1706 1719 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1707 1720 pass
1708 1721
1709 1722 def get_diff(self, *args, **kwargs):
1710 1723 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1711 1724 return GitDiff('')
1712 1725
1713 1726
1714 1727 class CollectionGenerator(object):
1715 1728
1716 1729 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1717 1730 self.repo = repo
1718 1731 self.commit_ids = commit_ids
1719 1732 # TODO: (oliver) this isn't currently hooked up
1720 1733 self.collection_size = None
1721 1734 self.pre_load = pre_load
1722 1735 self.translate_tag = translate_tag
1723 1736
1724 1737 def __len__(self):
1725 1738 if self.collection_size is not None:
1726 1739 return self.collection_size
1727 1740 return self.commit_ids.__len__()
1728 1741
1729 1742 def __iter__(self):
1730 1743 for commit_id in self.commit_ids:
1731 1744 # TODO: johbo: Mercurial passes in commit indices or commit ids
1732 1745 yield self._commit_factory(commit_id)
1733 1746
1734 1747 def _commit_factory(self, commit_id):
1735 1748 """
1736 1749 Allows backends to override the way commits are generated.
1737 1750 """
1738 1751 return self.repo.get_commit(
1739 1752 commit_id=commit_id, pre_load=self.pre_load,
1740 1753 translate_tag=self.translate_tag)
1741 1754
1742 1755 def __getslice__(self, i, j):
1743 1756 """
1744 1757 Returns an iterator of sliced repository
1745 1758 """
1746 1759 commit_ids = self.commit_ids[i:j]
1747 1760 return self.__class__(
1748 1761 self.repo, commit_ids, pre_load=self.pre_load,
1749 1762 translate_tag=self.translate_tag)
1750 1763
1751 1764 def __repr__(self):
1752 1765 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1753 1766
1754 1767
1755 1768 class Config(object):
1756 1769 """
1757 1770 Represents the configuration for a repository.
1758 1771
1759 1772 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1760 1773 standard library. It implements only the needed subset.
1761 1774 """
1762 1775
1763 1776 def __init__(self):
1764 1777 self._values = {}
1765 1778
1766 1779 def copy(self):
1767 1780 clone = Config()
1768 1781 for section, values in self._values.items():
1769 1782 clone._values[section] = values.copy()
1770 1783 return clone
1771 1784
1772 1785 def __repr__(self):
1773 1786 return '<Config(%s sections) at %s>' % (
1774 1787 len(self._values), hex(id(self)))
1775 1788
1776 1789 def items(self, section):
1777 1790 return self._values.get(section, {}).iteritems()
1778 1791
1779 1792 def get(self, section, option):
1780 1793 return self._values.get(section, {}).get(option)
1781 1794
1782 1795 def set(self, section, option, value):
1783 1796 section_values = self._values.setdefault(section, {})
1784 1797 section_values[option] = value
1785 1798
1786 1799 def clear_section(self, section):
1787 1800 self._values[section] = {}
1788 1801
1789 1802 def serialize(self):
1790 1803 """
1791 1804 Creates a list of three tuples (section, key, value) representing
1792 1805 this config object.
1793 1806 """
1794 1807 items = []
1795 1808 for section in self._values:
1796 1809 for option, value in self._values[section].items():
1797 1810 items.append(
1798 1811 (safe_str(section), safe_str(option), safe_str(value)))
1799 1812 return items
1800 1813
1801 1814
1802 1815 class Diff(object):
1803 1816 """
1804 1817 Represents a diff result from a repository backend.
1805 1818
1806 1819 Subclasses have to provide a backend specific value for
1807 1820 :attr:`_header_re` and :attr:`_meta_re`.
1808 1821 """
1809 1822 _meta_re = None
1810 1823 _header_re = None
1811 1824
1812 1825 def __init__(self, raw_diff):
1813 1826 self.raw = raw_diff
1814 1827
1815 1828 def chunks(self):
1816 1829 """
1817 1830 split the diff into chunks of separate --git a/file b/file chunks
1818 1831 to make diffs consistent we must prepend with \n, and make sure
1819 1832 we can detect the last chunk, as it also has a special rule
1820 1833 """
1821 1834
1822 1835 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1823 1836 header = diff_parts[0]
1824 1837
1825 1838 if self._meta_re:
1826 1839 match = self._meta_re.match(header)
1827 1840
1828 1841 chunks = diff_parts[1:]
1829 1842 total_chunks = len(chunks)
1830 1843
1831 1844 return (
1832 1845 DiffChunk(chunk, self, cur_chunk == total_chunks)
1833 1846 for cur_chunk, chunk in enumerate(chunks, start=1))
1834 1847
1835 1848
1836 1849 class DiffChunk(object):
1837 1850
1838 1851 def __init__(self, chunk, diff, last_chunk):
1839 1852 self._diff = diff
1840 1853
1841 1854 # since we split by \ndiff --git that part is lost from original diff
1842 1855 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1843 1856 if not last_chunk:
1844 1857 chunk += '\n'
1845 1858
1846 1859 match = self._diff._header_re.match(chunk)
1847 1860 self.header = match.groupdict()
1848 1861 self.diff = chunk[match.end():]
1849 1862 self.raw = chunk
1850 1863
1851 1864
1852 1865 class BasePathPermissionChecker(object):
1853 1866
1854 1867 @staticmethod
1855 1868 def create_from_patterns(includes, excludes):
1856 1869 if includes and '*' in includes and not excludes:
1857 1870 return AllPathPermissionChecker()
1858 1871 elif excludes and '*' in excludes:
1859 1872 return NonePathPermissionChecker()
1860 1873 else:
1861 1874 return PatternPathPermissionChecker(includes, excludes)
1862 1875
1863 1876 @property
1864 1877 def has_full_access(self):
1865 1878 raise NotImplemented()
1866 1879
1867 1880 def has_access(self, path):
1868 1881 raise NotImplemented()
1869 1882
1870 1883
1871 1884 class AllPathPermissionChecker(BasePathPermissionChecker):
1872 1885
1873 1886 @property
1874 1887 def has_full_access(self):
1875 1888 return True
1876 1889
1877 1890 def has_access(self, path):
1878 1891 return True
1879 1892
1880 1893
1881 1894 class NonePathPermissionChecker(BasePathPermissionChecker):
1882 1895
1883 1896 @property
1884 1897 def has_full_access(self):
1885 1898 return False
1886 1899
1887 1900 def has_access(self, path):
1888 1901 return False
1889 1902
1890 1903
1891 1904 class PatternPathPermissionChecker(BasePathPermissionChecker):
1892 1905
1893 1906 def __init__(self, includes, excludes):
1894 1907 self.includes = includes
1895 1908 self.excludes = excludes
1896 1909 self.includes_re = [] if not includes else [
1897 1910 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1898 1911 self.excludes_re = [] if not excludes else [
1899 1912 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1900 1913
1901 1914 @property
1902 1915 def has_full_access(self):
1903 1916 return '*' in self.includes and not self.excludes
1904 1917
1905 1918 def has_access(self, path):
1906 1919 for regex in self.excludes_re:
1907 1920 if regex.match(path):
1908 1921 return False
1909 1922 for regex in self.includes_re:
1910 1923 if regex.match(path):
1911 1924 return True
1912 1925 return False
@@ -1,2205 +1,2205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for usage in preview of creation of a pull-request.
93 93 This is also used for default reviewers logic
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # case here is that want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # target commit becomes the source ref as it is the last commit
113 113 # for diff generation this logic gives proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 commits = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message", "branch", "parents"])
161 161
162 162 for commit in commits:
163 163 user = User.get_from_cs_author(commit.author)
164 164 if user and user not in commit_authors:
165 165 commit_authors.append(user)
166 166
167 167 # lines
168 168 if get_authors:
169 169 log.debug('Calculating authors of changed files')
170 170 target_commit = source_repo.get_commit(ancestor_id)
171 171
172 172 for fname, lines in changed_lines.items():
173 173 try:
174 174 node = target_commit.get_node(fname)
175 175 except Exception:
176 176 continue
177 177
178 178 if not isinstance(node, FileNode):
179 179 continue
180 180
181 181 for annotation in node.annotate:
182 182 line_no, commit_id, get_commit_func, line_text = annotation
183 183 if line_no in lines:
184 184 if commit_id not in _commit_cache:
185 185 _commit_cache[commit_id] = get_commit_func()
186 186 commit = _commit_cache[commit_id]
187 187 author = commit.author
188 188 email = commit.author_email
189 189 user = User.get_from_cs_author(author)
190 190 if user:
191 191 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
192 192 author_counts[author] = author_counts.get(author, 0) + 1
193 193 email_counts[email] = email_counts.get(email, 0) + 1
194 194
195 195 log.debug('Default reviewers processing finished')
196 196
197 197 return {
198 198 'commits': commits,
199 199 'files': all_files_changes,
200 200 'stats': stats,
201 201 'ancestor': ancestor_id,
202 202 # original authors of modified files
203 203 'original_authors': {
204 204 'users': user_counts,
205 205 'authors': author_counts,
206 206 'emails': email_counts,
207 207 },
208 208 'commit_authors': commit_authors
209 209 }
210 210
211 211
212 212 class PullRequestModel(BaseModel):
213 213
214 214 cls = PullRequest
215 215
216 216 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
217 217
218 218 UPDATE_STATUS_MESSAGES = {
219 219 UpdateFailureReason.NONE: lazy_ugettext(
220 220 'Pull request update successful.'),
221 221 UpdateFailureReason.UNKNOWN: lazy_ugettext(
222 222 'Pull request update failed because of an unknown error.'),
223 223 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
224 224 'No update needed because the source and target have not changed.'),
225 225 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
226 226 'Pull request cannot be updated because the reference type is '
227 227 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
228 228 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
229 229 'This pull request cannot be updated because the target '
230 230 'reference is missing.'),
231 231 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
232 232 'This pull request cannot be updated because the source '
233 233 'reference is missing.'),
234 234 }
235 235 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
236 236 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
237 237
238 238 def __get_pull_request(self, pull_request):
239 239 return self._get_instance((
240 240 PullRequest, PullRequestVersion), pull_request)
241 241
242 242 def _check_perms(self, perms, pull_request, user, api=False):
243 243 if not api:
244 244 return h.HasRepoPermissionAny(*perms)(
245 245 user=user, repo_name=pull_request.target_repo.repo_name)
246 246 else:
247 247 return h.HasRepoPermissionAnyApi(*perms)(
248 248 user=user, repo_name=pull_request.target_repo.repo_name)
249 249
250 250 def check_user_read(self, pull_request, user, api=False):
251 251 _perms = ('repository.admin', 'repository.write', 'repository.read',)
252 252 return self._check_perms(_perms, pull_request, user, api)
253 253
254 254 def check_user_merge(self, pull_request, user, api=False):
255 255 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
256 256 return self._check_perms(_perms, pull_request, user, api)
257 257
258 258 def check_user_update(self, pull_request, user, api=False):
259 259 owner = user.user_id == pull_request.user_id
260 260 return self.check_user_merge(pull_request, user, api) or owner
261 261
262 262 def check_user_delete(self, pull_request, user):
263 263 owner = user.user_id == pull_request.user_id
264 264 _perms = ('repository.admin',)
265 265 return self._check_perms(_perms, pull_request, user) or owner
266 266
267 267 def check_user_change_status(self, pull_request, user, api=False):
268 268 reviewer = user.user_id in [x.user_id for x in
269 269 pull_request.reviewers]
270 270 return self.check_user_update(pull_request, user, api) or reviewer
271 271
272 272 def check_user_comment(self, pull_request, user):
273 273 owner = user.user_id == pull_request.user_id
274 274 return self.check_user_read(pull_request, user) or owner
275 275
276 276 def get(self, pull_request):
277 277 return self.__get_pull_request(pull_request)
278 278
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build the base SQLAlchemy query shared by count_all/get_all.

        :param repo_name: repository to scope the query to; matched as the
            source repo when *source* is True, otherwise as the target repo
        :param search_q: free-text filter applied to id, author username,
            title and description (case-insensitive LIKE)
        :param source: treat *repo_name* as the source repository
        :param statuses: list of pull request statuses to include
        :param opened_by: list of author user ids to include
        :param order_by: one of the keys of the order map below
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to pull requests in "created" state
        :returns: un-executed SQLAlchemy query object
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            # join with User so author username can be matched too
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            # 'name_raw' and 'id' are aliases for the same column
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
330 330
331 331 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
332 332 opened_by=None):
333 333 """
334 334 Count the number of pull requests for a specific repository.
335 335
336 336 :param repo_name: target or source repo
337 337 :param search_q: filter by text
338 338 :param source: boolean flag to specify if repo_name refers to source
339 339 :param statuses: list of pull request statuses
340 340 :param opened_by: author user of the pull request
341 341 :returns: int number of pull requests
342 342 """
343 343 q = self._prepare_get_all_query(
344 344 repo_name, search_q=search_q, source=source, statuses=statuses,
345 345 opened_by=opened_by)
346 346
347 347 return q.count()
348 348
349 349 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
350 350 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
351 351 """
352 352 Get all pull requests for a specific repository.
353 353
354 354 :param repo_name: target or source repo
355 355 :param search_q: filter by text
356 356 :param source: boolean flag to specify if repo_name refers to source
357 357 :param statuses: list of pull request statuses
358 358 :param opened_by: author user of the pull request
359 359 :param offset: pagination offset
360 360 :param length: length of returned list
361 361 :param order_by: order of the returned list
362 362 :param order_dir: 'asc' or 'desc' ordering direction
363 363 :returns: list of pull requests
364 364 """
365 365 q = self._prepare_get_all_query(
366 366 repo_name, search_q=search_q, source=source, statuses=statuses,
367 367 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
368 368
369 369 if length:
370 370 pull_requests = q.limit(length).offset(offset).all()
371 371 else:
372 372 pull_requests = q.all()
373 373
374 374 return pull_requests
375 375
376 376 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
377 377 opened_by=None):
378 378 """
379 379 Count the number of pull requests for a specific repository that are
380 380 awaiting review.
381 381
382 382 :param repo_name: target or source repo
383 383 :param search_q: filter by text
384 384 :param source: boolean flag to specify if repo_name refers to source
385 385 :param statuses: list of pull request statuses
386 386 :param opened_by: author user of the pull request
387 387 :returns: int number of pull requests
388 388 """
389 389 pull_requests = self.get_awaiting_review(
390 390 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
391 391
392 392 return len(pull_requests)
393 393
394 394 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
395 395 opened_by=None, offset=0, length=None,
396 396 order_by=None, order_dir='desc'):
397 397 """
398 398 Get all pull requests for a specific repository that are awaiting
399 399 review.
400 400
401 401 :param repo_name: target or source repo
402 402 :param search_q: filter by text
403 403 :param source: boolean flag to specify if repo_name refers to source
404 404 :param statuses: list of pull request statuses
405 405 :param opened_by: author user of the pull request
406 406 :param offset: pagination offset
407 407 :param length: length of returned list
408 408 :param order_by: order of the returned list
409 409 :param order_dir: 'asc' or 'desc' ordering direction
410 410 :returns: list of pull requests
411 411 """
412 412 pull_requests = self.get_all(
413 413 repo_name, search_q=search_q, source=source, statuses=statuses,
414 414 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
415 415
416 416 _filtered_pull_requests = []
417 417 for pr in pull_requests:
418 418 status = pr.calculated_review_status()
419 419 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
420 420 ChangesetStatus.STATUS_UNDER_REVIEW]:
421 421 _filtered_pull_requests.append(pr)
422 422 if length:
423 423 return _filtered_pull_requests[offset:offset+length]
424 424 else:
425 425 return _filtered_pull_requests
426 426
427 427 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
428 428 opened_by=None, user_id=None):
429 429 """
430 430 Count the number of pull requests for a specific repository that are
431 431 awaiting review from a specific user.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param user_id: reviewer user of the pull request
439 439 :returns: int number of pull requests
440 440 """
441 441 pull_requests = self.get_awaiting_my_review(
442 442 repo_name, search_q=search_q, source=source, statuses=statuses,
443 443 opened_by=opened_by, user_id=user_id)
444 444
445 445 return len(pull_requests)
446 446
447 447 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
448 448 opened_by=None, user_id=None, offset=0,
449 449 length=None, order_by=None, order_dir='desc'):
450 450 """
451 451 Get all pull requests for a specific repository that are awaiting
452 452 review from a specific user.
453 453
454 454 :param repo_name: target or source repo
455 455 :param search_q: filter by text
456 456 :param source: boolean flag to specify if repo_name refers to source
457 457 :param statuses: list of pull request statuses
458 458 :param opened_by: author user of the pull request
459 459 :param user_id: reviewer user of the pull request
460 460 :param offset: pagination offset
461 461 :param length: length of returned list
462 462 :param order_by: order of the returned list
463 463 :param order_dir: 'asc' or 'desc' ordering direction
464 464 :returns: list of pull requests
465 465 """
466 466 pull_requests = self.get_all(
467 467 repo_name, search_q=search_q, source=source, statuses=statuses,
468 468 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
469 469
470 470 _my = PullRequestModel().get_not_reviewed(user_id)
471 471 my_participation = []
472 472 for pr in pull_requests:
473 473 if pr in _my:
474 474 my_participation.append(pr)
475 475 _filtered_pull_requests = my_participation
476 476 if length:
477 477 return _filtered_pull_requests[offset:offset+length]
478 478 else:
479 479 return _filtered_pull_requests
480 480
481 481 def get_not_reviewed(self, user_id):
482 482 return [
483 483 x.pull_request for x in PullRequestReviewers.query().filter(
484 484 PullRequestReviewers.user_id == user_id).all()
485 485 ]
486 486
487 487 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
488 488 order_by=None, order_dir='desc'):
489 489 q = PullRequest.query()
490 490 if user_id:
491 491 reviewers_subquery = Session().query(
492 492 PullRequestReviewers.pull_request_id).filter(
493 493 PullRequestReviewers.user_id == user_id).subquery()
494 494 user_filter = or_(
495 495 PullRequest.user_id == user_id,
496 496 PullRequest.pull_request_id.in_(reviewers_subquery)
497 497 )
498 498 q = PullRequest.query().filter(user_filter)
499 499
500 500 # closed,opened
501 501 if statuses:
502 502 q = q.filter(PullRequest.status.in_(statuses))
503 503
504 504 if query:
505 505 like_expression = u'%{}%'.format(safe_unicode(query))
506 506 q = q.join(User)
507 507 q = q.filter(or_(
508 508 cast(PullRequest.pull_request_id, String).ilike(like_expression),
509 509 User.username.ilike(like_expression),
510 510 PullRequest.title.ilike(like_expression),
511 511 PullRequest.description.ilike(like_expression),
512 512 ))
513 513 if order_by:
514 514 order_map = {
515 515 'name_raw': PullRequest.pull_request_id,
516 516 'title': PullRequest.title,
517 517 'updated_on_raw': PullRequest.updated_on,
518 518 'target_repo': PullRequest.target_repo_id
519 519 }
520 520 if order_dir == 'asc':
521 521 q = q.order_by(order_map[order_by].asc())
522 522 else:
523 523 q = q.order_by(order_map[order_by].desc())
524 524
525 525 return q
526 526
527 527 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
528 528 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
529 529 return q.count()
530 530
531 531 def get_im_participating_in(
532 532 self, user_id=None, statuses=None, query='', offset=0,
533 533 length=None, order_by=None, order_dir='desc'):
534 534 """
535 535 Get all Pull requests that i'm participating in, or i have opened
536 536 """
537 537
538 538 q = self._prepare_participating_query(
539 539 user_id, statuses=statuses, query=query, order_by=order_by,
540 540 order_dir=order_dir)
541 541
542 542 if length:
543 543 pull_requests = q.limit(length).offset(offset).all()
544 544 else:
545 545 pull_requests = q.all()
546 546
547 547 return pull_requests
548 548
    def get_versions(self, pull_request):
        """
        returns versions of pull request sorted by version id ascending
        (oldest first; the latest version is last, which callers rely on
        via ``versions[-1]``)
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
557 557
558 558 def get_pr_version(self, pull_request_id, version=None):
559 559 at_version = None
560 560
561 561 if version and version == 'latest':
562 562 pull_request_ver = PullRequest.get(pull_request_id)
563 563 pull_request_obj = pull_request_ver
564 564 _org_pull_request_obj = pull_request_obj
565 565 at_version = 'latest'
566 566 elif version:
567 567 pull_request_ver = PullRequestVersion.get_or_404(version)
568 568 pull_request_obj = pull_request_ver
569 569 _org_pull_request_obj = pull_request_ver.pull_request
570 570 at_version = pull_request_ver.pull_request_version_id
571 571 else:
572 572 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
573 573 pull_request_id)
574 574
575 575 pull_request_display_obj = PullRequest.get_pr_display_object(
576 576 pull_request_obj, _org_pull_request_obj)
577 577
578 578 return _org_pull_request_obj, pull_request_obj, \
579 579 pull_request_display_obj, at_version
580 580
581 581 def create(self, created_by, source_repo, source_ref, target_repo,
582 582 target_ref, revisions, reviewers, observers, title, description=None,
583 583 common_ancestor_id=None,
584 584 description_renderer=None,
585 585 reviewer_data=None, translator=None, auth_user=None):
586 586 translator = translator or get_current_request().translate
587 587
588 588 created_by_user = self._get_user(created_by)
589 589 auth_user = auth_user or created_by_user.AuthUser()
590 590 source_repo = self._get_repo(source_repo)
591 591 target_repo = self._get_repo(target_repo)
592 592
593 593 pull_request = PullRequest()
594 594 pull_request.source_repo = source_repo
595 595 pull_request.source_ref = source_ref
596 596 pull_request.target_repo = target_repo
597 597 pull_request.target_ref = target_ref
598 598 pull_request.revisions = revisions
599 599 pull_request.title = title
600 600 pull_request.description = description
601 601 pull_request.description_renderer = description_renderer
602 602 pull_request.author = created_by_user
603 603 pull_request.reviewer_data = reviewer_data
604 604 pull_request.pull_request_state = pull_request.STATE_CREATING
605 605 pull_request.common_ancestor_id = common_ancestor_id
606 606
607 607 Session().add(pull_request)
608 608 Session().flush()
609 609
610 610 reviewer_ids = set()
611 611 # members / reviewers
612 612 for reviewer_object in reviewers:
613 613 user_id, reasons, mandatory, role, rules = reviewer_object
614 614 user = self._get_user(user_id)
615 615
616 616 # skip duplicates
617 617 if user.user_id in reviewer_ids:
618 618 continue
619 619
620 620 reviewer_ids.add(user.user_id)
621 621
622 622 reviewer = PullRequestReviewers()
623 623 reviewer.user = user
624 624 reviewer.pull_request = pull_request
625 625 reviewer.reasons = reasons
626 626 reviewer.mandatory = mandatory
627 627 reviewer.role = role
628 628
629 629 # NOTE(marcink): pick only first rule for now
630 630 rule_id = list(rules)[0] if rules else None
631 631 rule = RepoReviewRule.get(rule_id) if rule_id else None
632 632 if rule:
633 633 review_group = rule.user_group_vote_rule(user_id)
634 634 # we check if this particular reviewer is member of a voting group
635 635 if review_group:
636 636 # NOTE(marcink):
637 637 # can be that user is member of more but we pick the first same,
638 638 # same as default reviewers algo
639 639 review_group = review_group[0]
640 640
641 641 rule_data = {
642 642 'rule_name':
643 643 rule.review_rule_name,
644 644 'rule_user_group_entry_id':
645 645 review_group.repo_review_rule_users_group_id,
646 646 'rule_user_group_name':
647 647 review_group.users_group.users_group_name,
648 648 'rule_user_group_members':
649 649 [x.user.username for x in review_group.users_group.members],
650 650 'rule_user_group_members_id':
651 651 [x.user.user_id for x in review_group.users_group.members],
652 652 }
653 653 # e.g {'vote_rule': -1, 'mandatory': True}
654 654 rule_data.update(review_group.rule_data())
655 655
656 656 reviewer.rule_data = rule_data
657 657
658 658 Session().add(reviewer)
659 659 Session().flush()
660 660
661 661 for observer_object in observers:
662 662 user_id, reasons, mandatory, role, rules = observer_object
663 663 user = self._get_user(user_id)
664 664
665 665 # skip duplicates from reviewers
666 666 if user.user_id in reviewer_ids:
667 667 continue
668 668
669 669 #reviewer_ids.add(user.user_id)
670 670
671 671 observer = PullRequestReviewers()
672 672 observer.user = user
673 673 observer.pull_request = pull_request
674 674 observer.reasons = reasons
675 675 observer.mandatory = mandatory
676 676 observer.role = role
677 677
678 678 # NOTE(marcink): pick only first rule for now
679 679 rule_id = list(rules)[0] if rules else None
680 680 rule = RepoReviewRule.get(rule_id) if rule_id else None
681 681 if rule:
682 682 # TODO(marcink): do we need this for observers ??
683 683 pass
684 684
685 685 Session().add(observer)
686 686 Session().flush()
687 687
688 688 # Set approval status to "Under Review" for all commits which are
689 689 # part of this pull request.
690 690 ChangesetStatusModel().set_status(
691 691 repo=target_repo,
692 692 status=ChangesetStatus.STATUS_UNDER_REVIEW,
693 693 user=created_by_user,
694 694 pull_request=pull_request
695 695 )
696 696 # we commit early at this point. This has to do with a fact
697 697 # that before queries do some row-locking. And because of that
698 698 # we need to commit and finish transaction before below validate call
699 699 # that for large repos could be long resulting in long row locks
700 700 Session().commit()
701 701
702 702 # prepare workspace, and run initial merge simulation. Set state during that
703 703 # operation
704 704 pull_request = PullRequest.get(pull_request.pull_request_id)
705 705
706 706 # set as merging, for merge simulation, and if finished to created so we mark
707 707 # simulation is working fine
708 708 with pull_request.set_state(PullRequest.STATE_MERGING,
709 709 final_state=PullRequest.STATE_CREATED) as state_obj:
710 710 MergeCheck.validate(
711 711 pull_request, auth_user=auth_user, translator=translator)
712 712
713 self.notify_reviewers(pull_request, reviewer_ids)
713 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
714 714 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
715 715
716 716 creation_data = pull_request.get_api_data(with_merge_state=False)
717 717 self._log_audit_action(
718 718 'repo.pull_request.create', {'data': creation_data},
719 719 auth_user, pull_request)
720 720
721 721 return pull_request
722 722
723 723 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
724 724 pull_request = self.__get_pull_request(pull_request)
725 725 target_scm = pull_request.target_repo.scm_instance()
726 726 if action == 'create':
727 727 trigger_hook = hooks_utils.trigger_create_pull_request_hook
728 728 elif action == 'merge':
729 729 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
730 730 elif action == 'close':
731 731 trigger_hook = hooks_utils.trigger_close_pull_request_hook
732 732 elif action == 'review_status_change':
733 733 trigger_hook = hooks_utils.trigger_review_pull_request_hook
734 734 elif action == 'update':
735 735 trigger_hook = hooks_utils.trigger_update_pull_request_hook
736 736 elif action == 'comment':
737 737 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
738 738 elif action == 'comment_edit':
739 739 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
740 740 else:
741 741 return
742 742
743 743 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
744 744 pull_request, action, trigger_hook)
745 745 trigger_hook(
746 746 username=user.username,
747 747 repo_name=pull_request.target_repo.repo_name,
748 748 repo_type=target_scm.alias,
749 749 pull_request=pull_request,
750 750 data=data)
751 751
752 752 def _get_commit_ids(self, pull_request):
753 753 """
754 754 Return the commit ids of the merged pull request.
755 755
756 756 This method is not dealing correctly yet with the lack of autoupdates
757 757 nor with the implicit target updates.
758 758 For example: if a commit in the source repo is already in the target it
759 759 will be reported anyways.
760 760 """
761 761 merge_rev = pull_request.merge_rev
762 762 if merge_rev is None:
763 763 raise ValueError('This pull request was not merged yet')
764 764
765 765 commit_ids = list(pull_request.revisions)
766 766 if merge_rev not in commit_ids:
767 767 commit_ids.append(merge_rev)
768 768
769 769 return commit_ids
770 770
771 771 def merge_repo(self, pull_request, user, extras):
772 772 log.debug("Merging pull request %s", pull_request.pull_request_id)
773 773 extras['user_agent'] = 'internal-merge'
774 774 merge_state = self._merge_pull_request(pull_request, user, extras)
775 775 if merge_state.executed:
776 776 log.debug("Merge was successful, updating the pull request comments.")
777 777 self._comment_and_close_pr(pull_request, user, merge_state)
778 778
779 779 self._log_audit_action(
780 780 'repo.pull_request.merge',
781 781 {'merge_state': merge_state.__dict__},
782 782 user, pull_request)
783 783
784 784 else:
785 785 log.warn("Merge failed, not updating the pull request.")
786 786 return merge_state
787 787
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Run the low-level vcs merge of the pull request source into the
        target, inside a hooks callback daemon.

        :param pull_request: pull request to merge
        :param user: user performing the merge (name/email go into the
            merge commit)
        :param extras: hook extras dict, serialized into the repo config
        :param merge_msg: optional message template overriding the
            configured MERGE_MESSAGE_TMPL
        :returns: MergeResponse state object from ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # refresh the target reference so we merge against its current state
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
828 828
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision, add the closing
        comment, invalidate the target repo caches and fire the 'merge' hook.

        :param pull_request: merged pull request
        :param user: user who performed the merge
        :param merge_state: MergeResponse with the resulting merge_ref
        :param close_msg: optional closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # the closing comment also flips the pull request status (closing_pr)
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
850 850
851 851 def has_valid_update_type(self, pull_request):
852 852 source_ref_type = pull_request.source_ref_parts.type
853 853 return source_ref_type in self.REF_TYPES
854 854
855 855 def get_flow_commits(self, pull_request):
856 856
857 857 # source repo
858 858 source_ref_name = pull_request.source_ref_parts.name
859 859 source_ref_type = pull_request.source_ref_parts.type
860 860 source_ref_id = pull_request.source_ref_parts.commit_id
861 861 source_repo = pull_request.source_repo.scm_instance()
862 862
863 863 try:
864 864 if source_ref_type in self.REF_TYPES:
865 865 source_commit = source_repo.get_commit(source_ref_name)
866 866 else:
867 867 source_commit = source_repo.get_commit(source_ref_id)
868 868 except CommitDoesNotExistError:
869 869 raise SourceRefMissing()
870 870
871 871 # target repo
872 872 target_ref_name = pull_request.target_ref_parts.name
873 873 target_ref_type = pull_request.target_ref_parts.type
874 874 target_ref_id = pull_request.target_ref_parts.commit_id
875 875 target_repo = pull_request.target_repo.scm_instance()
876 876
877 877 try:
878 878 if target_ref_type in self.REF_TYPES:
879 879 target_commit = target_repo.get_commit(target_ref_name)
880 880 else:
881 881 target_commit = target_repo.get_commit(target_ref_id)
882 882 except CommitDoesNotExistError:
883 883 raise TargetRefMissing()
884 884
885 885 return source_commit, target_commit
886 886
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update

        :returns: UpdateResponse describing whether and how the pull
            request was updated
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # only named, movable ref types can be updated
        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # compare the stored ref commit ids with the freshly resolved tips
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                # NOTE(review): the two kwargs below look swapped, but both
                # values are False in this branch so the result is identical
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        # store refreshed refs; the target ref is pinned to the ancestor
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # send update email to users
        try:
            self.notify_users(pull_request=pull_request, updating_user=updating_user,
                              ancestor_commit_id=ancestor_commit_id,
                              commit_changes=commit_changes,
                              file_changes=file_changes)
        except Exception:
            # notification failure must not abort the update itself
            log.exception('Failed to send email notification to users')

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1061 1061
1062 1062 def _create_version_from_snapshot(self, pull_request):
1063 1063 version = PullRequestVersion()
1064 1064 version.title = pull_request.title
1065 1065 version.description = pull_request.description
1066 1066 version.status = pull_request.status
1067 1067 version.pull_request_state = pull_request.pull_request_state
1068 1068 version.created_on = datetime.datetime.now()
1069 1069 version.updated_on = pull_request.updated_on
1070 1070 version.user_id = pull_request.user_id
1071 1071 version.source_repo = pull_request.source_repo
1072 1072 version.source_ref = pull_request.source_ref
1073 1073 version.target_repo = pull_request.target_repo
1074 1074 version.target_ref = pull_request.target_ref
1075 1075
1076 1076 version._last_merge_source_rev = pull_request._last_merge_source_rev
1077 1077 version._last_merge_target_rev = pull_request._last_merge_target_rev
1078 1078 version.last_merge_status = pull_request.last_merge_status
1079 1079 version.last_merge_metadata = pull_request.last_merge_metadata
1080 1080 version.shadow_merge_ref = pull_request.shadow_merge_ref
1081 1081 version.merge_rev = pull_request.merge_rev
1082 1082 version.reviewer_data = pull_request.reviewer_data
1083 1083
1084 1084 version.revisions = pull_request.revisions
1085 1085 version.common_ancestor_id = pull_request.common_ancestor_id
1086 1086 version.pull_request = pull_request
1087 1087 Session().add(version)
1088 1088 Session().flush()
1089 1089
1090 1090 return version
1091 1091
1092 1092 def _generate_update_diffs(self, pull_request, pull_request_version):
1093 1093
1094 1094 diff_context = (
1095 1095 self.DIFF_CONTEXT +
1096 1096 CommentsModel.needed_extra_diff_context())
1097 1097 hide_whitespace_changes = False
1098 1098 source_repo = pull_request_version.source_repo
1099 1099 source_ref_id = pull_request_version.source_ref_parts.commit_id
1100 1100 target_ref_id = pull_request_version.target_ref_parts.commit_id
1101 1101 old_diff = self._get_diff_from_pr_or_version(
1102 1102 source_repo, source_ref_id, target_ref_id,
1103 1103 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1104 1104
1105 1105 source_repo = pull_request.source_repo
1106 1106 source_ref_id = pull_request.source_ref_parts.commit_id
1107 1107 target_ref_id = pull_request.target_ref_parts.commit_id
1108 1108
1109 1109 new_diff = self._get_diff_from_pr_or_version(
1110 1110 source_repo, source_ref_id, target_ref_id,
1111 1111 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1112 1112
1113 1113 old_diff_data = diffs.DiffProcessor(old_diff)
1114 1114 old_diff_data.prepare()
1115 1115 new_diff_data = diffs.DiffProcessor(new_diff)
1116 1116 new_diff_data.prepare()
1117 1117
1118 1118 return old_diff_data, new_diff_data
1119 1119
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # only comments that are not yet attached to any version qualify
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1145 1145
1146 1146 def _calculate_commit_id_changes(self, old_ids, new_ids):
1147 1147 added = [x for x in new_ids if x not in old_ids]
1148 1148 common = [x for x in new_ids if x in old_ids]
1149 1149 removed = [x for x in old_ids if x not in new_ids]
1150 1150 total = new_ids
1151 1151 return ChangeTuple(added, common, removed, total)
1152 1152
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Classify files as added/modified/removed between two prepared
        `DiffProcessor` results (old vs. new state of the pull request).

        :return: ``FileChangeTuple(added, modified, removed)`` of filenames
        """
        # map filename -> md5 of its raw diff text in the OLD diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                # file is in both diffs; differing raw-diff hashes mean it changed
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1187 1187
1188 1188 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1189 1189 """
1190 1190 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1191 1191 so it's always looking the same disregarding on which default
1192 1192 renderer system is using.
1193 1193
1194 1194 :param ancestor_commit_id: ancestor raw_id
1195 1195 :param changes: changes named tuple
1196 1196 :param file_changes: file changes named tuple
1197 1197
1198 1198 """
1199 1199 new_status = ChangesetStatus.get_status_lbl(
1200 1200 ChangesetStatus.STATUS_UNDER_REVIEW)
1201 1201
1202 1202 changed_files = (
1203 1203 file_changes.added + file_changes.modified + file_changes.removed)
1204 1204
1205 1205 params = {
1206 1206 'under_review_label': new_status,
1207 1207 'added_commits': changes.added,
1208 1208 'removed_commits': changes.removed,
1209 1209 'changed_files': changed_files,
1210 1210 'added_files': file_changes.added,
1211 1211 'modified_files': file_changes.modified,
1212 1212 'removed_files': file_changes.removed,
1213 1213 'ancestor_commit_id': ancestor_commit_id
1214 1214 }
1215 1215 renderer = RstTemplateRenderer()
1216 1216 return renderer.render('pull_request_update.mako', **params)
1217 1217
1218 1218 def edit(self, pull_request, title, description, description_renderer, user):
1219 1219 pull_request = self.__get_pull_request(pull_request)
1220 1220 old_data = pull_request.get_api_data(with_merge_state=False)
1221 1221 if pull_request.is_closed():
1222 1222 raise ValueError('This pull request is closed')
1223 1223 if title:
1224 1224 pull_request.title = title
1225 1225 pull_request.description = description
1226 1226 pull_request.updated_on = datetime.datetime.now()
1227 1227 pull_request.description_renderer = description_renderer
1228 1228 Session().add(pull_request)
1229 1229 self._log_audit_action(
1230 1230 'repo.pull_request.edit', {'old_data': old_data},
1231 1231 user, pull_request)
1232 1232
1233 1233 def update_reviewers(self, pull_request, reviewer_data, user):
1234 1234 """
1235 1235 Update the reviewers in the pull request
1236 1236
1237 1237 :param pull_request: the pr to update
1238 1238 :param reviewer_data: list of tuples
1239 1239 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1240 1240 :param user: current use who triggers this action
1241 1241 """
1242 1242
1243 1243 pull_request = self.__get_pull_request(pull_request)
1244 1244 if pull_request.is_closed():
1245 1245 raise ValueError('This pull request is closed')
1246 1246
1247 1247 reviewers = {}
1248 1248 for user_id, reasons, mandatory, role, rules in reviewer_data:
1249 1249 if isinstance(user_id, (int, compat.string_types)):
1250 1250 user_id = self._get_user(user_id).user_id
1251 1251 reviewers[user_id] = {
1252 1252 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1253 1253
1254 1254 reviewers_ids = set(reviewers.keys())
1255 1255 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1256 1256 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1257 1257
1258 1258 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1259 1259
1260 1260 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1261 1261 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1262 1262
1263 1263 log.debug("Adding %s reviewers", ids_to_add)
1264 1264 log.debug("Removing %s reviewers", ids_to_remove)
1265 1265 changed = False
1266 1266 added_audit_reviewers = []
1267 1267 removed_audit_reviewers = []
1268 1268
1269 1269 for uid in ids_to_add:
1270 1270 changed = True
1271 1271 _usr = self._get_user(uid)
1272 1272 reviewer = PullRequestReviewers()
1273 1273 reviewer.user = _usr
1274 1274 reviewer.pull_request = pull_request
1275 1275 reviewer.reasons = reviewers[uid]['reasons']
1276 1276 # NOTE(marcink): mandatory shouldn't be changed now
1277 1277 # reviewer.mandatory = reviewers[uid]['reasons']
1278 1278 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1279 1279 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1280 1280 Session().add(reviewer)
1281 1281 added_audit_reviewers.append(reviewer.get_dict())
1282 1282
1283 1283 for uid in ids_to_remove:
1284 1284 changed = True
1285 1285 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1286 1286 # This is an edge case that handles previous state of having the same reviewer twice.
1287 1287 # this CAN happen due to the lack of DB checks
1288 1288 reviewers = PullRequestReviewers.query()\
1289 1289 .filter(PullRequestReviewers.user_id == uid,
1290 1290 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1291 1291 PullRequestReviewers.pull_request == pull_request)\
1292 1292 .all()
1293 1293
1294 1294 for obj in reviewers:
1295 1295 added_audit_reviewers.append(obj.get_dict())
1296 1296 Session().delete(obj)
1297 1297
1298 1298 if changed:
1299 1299 Session().expire_all()
1300 1300 pull_request.updated_on = datetime.datetime.now()
1301 1301 Session().add(pull_request)
1302 1302
1303 1303 # finally store audit logs
1304 1304 for user_data in added_audit_reviewers:
1305 1305 self._log_audit_action(
1306 1306 'repo.pull_request.reviewer.add', {'data': user_data},
1307 1307 user, pull_request)
1308 1308 for user_data in removed_audit_reviewers:
1309 1309 self._log_audit_action(
1310 1310 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1311 1311 user, pull_request)
1312 1312
1313 1313 self.notify_reviewers(pull_request, ids_to_add, user.get_instance())
1314 1314 return ids_to_add, ids_to_remove
1315 1315
1316 1316 def update_observers(self, pull_request, observer_data, user):
1317 1317 """
1318 1318 Update the observers in the pull request
1319 1319
1320 1320 :param pull_request: the pr to update
1321 1321 :param observer_data: list of tuples
1322 1322 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1323 1323 :param user: current use who triggers this action
1324 1324 """
1325 1325 pull_request = self.__get_pull_request(pull_request)
1326 1326 if pull_request.is_closed():
1327 1327 raise ValueError('This pull request is closed')
1328 1328
1329 1329 observers = {}
1330 1330 for user_id, reasons, mandatory, role, rules in observer_data:
1331 1331 if isinstance(user_id, (int, compat.string_types)):
1332 1332 user_id = self._get_user(user_id).user_id
1333 1333 observers[user_id] = {
1334 1334 'reasons': reasons, 'observers': mandatory, 'role': role}
1335 1335
1336 1336 observers_ids = set(observers.keys())
1337 1337 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1338 1338 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1339 1339
1340 1340 current_observers_ids = set([x.user.user_id for x in current_observers])
1341 1341
1342 1342 ids_to_add = observers_ids.difference(current_observers_ids)
1343 1343 ids_to_remove = current_observers_ids.difference(observers_ids)
1344 1344
1345 1345 log.debug("Adding %s observer", ids_to_add)
1346 1346 log.debug("Removing %s observer", ids_to_remove)
1347 1347 changed = False
1348 1348 added_audit_observers = []
1349 1349 removed_audit_observers = []
1350 1350
1351 1351 for uid in ids_to_add:
1352 1352 changed = True
1353 1353 _usr = self._get_user(uid)
1354 1354 observer = PullRequestReviewers()
1355 1355 observer.user = _usr
1356 1356 observer.pull_request = pull_request
1357 1357 observer.reasons = observers[uid]['reasons']
1358 1358 # NOTE(marcink): mandatory shouldn't be changed now
1359 1359 # observer.mandatory = observer[uid]['reasons']
1360 1360
1361 1361 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1362 1362 observer.role = PullRequestReviewers.ROLE_OBSERVER
1363 1363 Session().add(observer)
1364 1364 added_audit_observers.append(observer.get_dict())
1365 1365
1366 1366 for uid in ids_to_remove:
1367 1367 changed = True
1368 1368 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1369 1369 # This is an edge case that handles previous state of having the same reviewer twice.
1370 1370 # this CAN happen due to the lack of DB checks
1371 1371 observers = PullRequestReviewers.query()\
1372 1372 .filter(PullRequestReviewers.user_id == uid,
1373 1373 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1374 1374 PullRequestReviewers.pull_request == pull_request)\
1375 1375 .all()
1376 1376
1377 1377 for obj in observers:
1378 1378 added_audit_observers.append(obj.get_dict())
1379 1379 Session().delete(obj)
1380 1380
1381 1381 if changed:
1382 1382 Session().expire_all()
1383 1383 pull_request.updated_on = datetime.datetime.now()
1384 1384 Session().add(pull_request)
1385 1385
1386 1386 # finally store audit logs
1387 1387 for user_data in added_audit_observers:
1388 1388 self._log_audit_action(
1389 1389 'repo.pull_request.observer.add', {'data': user_data},
1390 1390 user, pull_request)
1391 1391 for user_data in removed_audit_observers:
1392 1392 self._log_audit_action(
1393 1393 'repo.pull_request.observer.delete', {'old_data': user_data},
1394 1394 user, pull_request)
1395 1395
1396 1396 self.notify_observers(pull_request, ids_to_add, user.get_instance())
1397 1397 return ids_to_add, ids_to_remove
1398 1398
1399 1399 def get_url(self, pull_request, request=None, permalink=False):
1400 1400 if not request:
1401 1401 request = get_current_request()
1402 1402
1403 1403 if permalink:
1404 1404 return request.route_url(
1405 1405 'pull_requests_global',
1406 1406 pull_request_id=pull_request.pull_request_id,)
1407 1407 else:
1408 1408 return request.route_url('pullrequest_show',
1409 1409 repo_name=safe_str(pull_request.target_repo.repo_name),
1410 1410 pull_request_id=pull_request.pull_request_id,)
1411 1411
1412 1412 def get_shadow_clone_url(self, pull_request, request=None):
1413 1413 """
1414 1414 Returns qualified url pointing to the shadow repository. If this pull
1415 1415 request is closed there is no shadow repository and ``None`` will be
1416 1416 returned.
1417 1417 """
1418 1418 if pull_request.is_closed():
1419 1419 return None
1420 1420 else:
1421 1421 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1422 1422 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1423 1423
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Create in-app notifications and emails about *pull_request* for
        *user_ids*, rendered for the given reviewer/observer *role*.

        :param pull_request: the pull request being announced
        :param user_ids: recipient user ids; no-op when empty
        :param role: PullRequestReviewers role string used in the template
        :param user: the acting user, recorded as the notification creator
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
            'user_role': role
        }

        # pre-generate the subject for notification itself
        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1486 1486
1487 1487 def notify_reviewers(self, pull_request, reviewers_ids, user):
1488 1488 return self._notify_reviewers(pull_request, reviewers_ids,
1489 1489 PullRequestReviewers.ROLE_REVIEWER, user)
1490 1490
1491 1491 def notify_observers(self, pull_request, observers_ids, user):
1492 1492 return self._notify_reviewers(pull_request, observers_ids,
1493 1493 PullRequestReviewers.ROLE_OBSERVER, user)
1494 1494
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Send a pull-request-update notification (in-app + email) to all
        reviewers of *pull_request*, excluding *updating_user*.

        :param updating_user: user who performed the update (not notified)
        :param ancestor_commit_id: common ancestor raw_id of the update
        :param commit_changes: ChangeTuple of added/removed commit ids
        :param file_changes: FileChangeTuple of added/modified/removed files
        """

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.reviewers])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'thread_ids': [pr_url],
        }

        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1562 1562
1563 1563 def delete(self, pull_request, user=None):
1564 1564 if not user:
1565 1565 user = getattr(get_current_rhodecode_user(), 'username', None)
1566 1566
1567 1567 pull_request = self.__get_pull_request(pull_request)
1568 1568 old_data = pull_request.get_api_data(with_merge_state=False)
1569 1569 self._cleanup_merge_workspace(pull_request)
1570 1570 self._log_audit_action(
1571 1571 'repo.pull_request.delete', {'old_data': old_data},
1572 1572 user, pull_request)
1573 1573 Session().delete(pull_request)
1574 1574
1575 1575 def close_pull_request(self, pull_request, user):
1576 1576 pull_request = self.__get_pull_request(pull_request)
1577 1577 self._cleanup_merge_workspace(pull_request)
1578 1578 pull_request.status = PullRequest.STATUS_CLOSED
1579 1579 pull_request.updated_on = datetime.datetime.now()
1580 1580 Session().add(pull_request)
1581 1581 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1582 1582
1583 1583 pr_data = pull_request.get_api_data(with_merge_state=False)
1584 1584 self._log_audit_action(
1585 1585 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1586 1586
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close *pull_request* while leaving a status-changing comment.

        The resulting status is APPROVED only when the calculated review
        status already is approved, otherwise REJECTED.

        :param message: optional closing comment; a default is rendered
            when omitted
        :return: tuple ``(comment, status)``
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1644 1644
1645 1645 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1646 1646 _ = translator or get_current_request().translate
1647 1647
1648 1648 if not self._is_merge_enabled(pull_request):
1649 1649 return None, False, _('Server-side pull request merging is disabled.')
1650 1650
1651 1651 if pull_request.is_closed():
1652 1652 return None, False, _('This pull request is closed.')
1653 1653
1654 1654 merge_possible, msg = self._check_repo_requirements(
1655 1655 target=pull_request.target_repo, source=pull_request.source_repo,
1656 1656 translator=_)
1657 1657 if not merge_possible:
1658 1658 return None, merge_possible, msg
1659 1659
1660 1660 try:
1661 1661 merge_response = self._try_merge(
1662 1662 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1663 1663 log.debug("Merge response: %s", merge_response)
1664 1664 return merge_response, merge_response.possible, merge_response.merge_status_message
1665 1665 except NotImplementedError:
1666 1666 return None, False, _('Pull request merging is not supported.')
1667 1667
1668 1668 def _check_repo_requirements(self, target, source, translator):
1669 1669 """
1670 1670 Check if `target` and `source` have compatible requirements.
1671 1671
1672 1672 Currently this is just checking for largefiles.
1673 1673 """
1674 1674 _ = translator
1675 1675 target_has_largefiles = self._has_largefiles(target)
1676 1676 source_has_largefiles = self._has_largefiles(source)
1677 1677 merge_possible = True
1678 1678 message = u''
1679 1679
1680 1680 if target_has_largefiles != source_has_largefiles:
1681 1681 merge_possible = False
1682 1682 if source_has_largefiles:
1683 1683 message = _(
1684 1684 'Target repository large files support is disabled.')
1685 1685 else:
1686 1686 message = _(
1687 1687 'Source repository large files support is disabled.')
1688 1688
1689 1689 return merge_possible, message
1690 1690
1691 1691 def _has_largefiles(self, repo):
1692 1692 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1693 1693 'extensions', 'largefiles')
1694 1694 return largefiles_ui and largefiles_ui[0].active
1695 1695
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Returns a cached `MergeResponse` when the stored merge state is still
        current, otherwise performs a dry-run merge in the shadow repository.

        :param force_shadow_repo_refresh: always refresh the shadow repo
            merge state, even when the cached state looks current
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) -> merge impossible
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is current: rebuild the response from stored fields
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1749 1749
1750 1750 def _refresh_reference(self, reference, vcs_repository):
1751 1751 if reference.type in self.UPDATABLE_REF_TYPES:
1752 1752 name_or_id = reference.name
1753 1753 else:
1754 1754 name_or_id = reference.commit_id
1755 1755
1756 1756 refreshed_commit = vcs_repository.get_commit(name_or_id)
1757 1757 refreshed_reference = Reference(
1758 1758 reference.type, reference.name, refreshed_commit.raw_id)
1759 1759 return refreshed_reference
1760 1760
1761 1761 def _needs_merge_state_refresh(self, pull_request, target_reference):
1762 1762 return not(
1763 1763 pull_request.revisions and
1764 1764 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1765 1765 target_reference.commit_id == pull_request._last_merge_target_rev)
1766 1766
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and persist the
        resulting merge state (revs, status, metadata, shadow ref) on the
        pull request.

        :return: the `MergeResponse` of the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1792 1792
1793 1793 def _workspace_id(self, pull_request):
1794 1794 workspace_id = 'pr-%s' % pull_request.pull_request_id
1795 1795 return workspace_id
1796 1796
1797 1797 def generate_repo_data(self, repo, commit_id=None, branch=None,
1798 1798 bookmark=None, translator=None):
1799 1799 from rhodecode.model.repo import RepoModel
1800 1800
1801 1801 all_refs, selected_ref = \
1802 1802 self._get_repo_pullrequest_sources(
1803 1803 repo.scm_instance(), commit_id=commit_id,
1804 1804 branch=branch, bookmark=bookmark, translator=translator)
1805 1805
1806 1806 refs_select2 = []
1807 1807 for element in all_refs:
1808 1808 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1809 1809 refs_select2.append({'text': element[1], 'children': children})
1810 1810
1811 1811 return {
1812 1812 'user': {
1813 1813 'user_id': repo.user.user_id,
1814 1814 'username': repo.user.username,
1815 1815 'firstname': repo.user.first_name,
1816 1816 'lastname': repo.user.last_name,
1817 1817 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1818 1818 },
1819 1819 'name': repo.repo_name,
1820 1820 'link': RepoModel().get_url(repo),
1821 1821 'description': h.chop_at_smart(repo.description_safe, '\n'),
1822 1822 'refs': {
1823 1823 'all_refs': all_refs,
1824 1824 'selected_ref': selected_ref,
1825 1825 'select2_refs': refs_select2
1826 1826 }
1827 1827 }
1828 1828
1829 1829 def generate_pullrequest_title(self, source, source_ref, target):
1830 1830 return u'{source}#{at_ref} to {target}'.format(
1831 1831 source=source,
1832 1832 at_ref=source_ref,
1833 1833 target=target,
1834 1834 )
1835 1835
1836 1836 def _cleanup_merge_workspace(self, pull_request):
1837 1837 # Merging related cleanup
1838 1838 repo_id = pull_request.target_repo.repo_id
1839 1839 target_scm = pull_request.target_repo.scm_instance()
1840 1840 workspace_id = self._workspace_id(pull_request)
1841 1841
1842 1842 try:
1843 1843 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1844 1844 except NotImplementedError:
1845 1845 pass
1846 1846
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :returns: ``(groups, selected)`` where groups is a list of
            ``([(ref_key, ref_name), ...], group_label)`` tuples and selected
            is the pre-selected ``<type>:<name>:<commit_id>`` key (or None)
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # first ref whose name or id equals either the requested
                    # commit_id or this source's match value wins
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            # groups with no refs (e.g. no bookmarks) are dropped entirely
            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # something explicit was requested but nothing matched
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the default branch head
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1912 1912
1913 1913 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1914 1914 hide_whitespace_changes, diff_context):
1915 1915
1916 1916 return self._get_diff_from_pr_or_version(
1917 1917 source_repo, source_ref_id, target_ref_id,
1918 1918 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1919 1919
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the vcs diff between target_ref_id and source_ref_id.

        ``source_repo`` may be either a db ``Repository`` or an already
        resolved scm instance; both are normalized to a vcs repo below.
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        # maybe_unreachable: the source rev may only exist in the shadow repo
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
1950 1950
1951 1951 def _is_merge_enabled(self, pull_request):
1952 1952 return self._get_general_setting(
1953 1953 pull_request, 'rhodecode_pr_merge_enabled')
1954 1954
1955 1955 def _use_rebase_for_merging(self, pull_request):
1956 1956 repo_type = pull_request.target_repo.repo_type
1957 1957 if repo_type == 'hg':
1958 1958 return self._get_general_setting(
1959 1959 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1960 1960 elif repo_type == 'git':
1961 1961 return self._get_general_setting(
1962 1962 pull_request, 'rhodecode_git_use_rebase_for_merging')
1963 1963
1964 1964 return False
1965 1965
1966 1966 def _user_name_for_merging(self, pull_request, user):
1967 1967 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1968 1968 if env_user_name_attr and hasattr(user, env_user_name_attr):
1969 1969 user_name_attr = env_user_name_attr
1970 1970 else:
1971 1971 user_name_attr = 'short_contact'
1972 1972
1973 1973 user_name = getattr(user, user_name_attr)
1974 1974 return user_name
1975 1975
1976 1976 def _close_branch_before_merging(self, pull_request):
1977 1977 repo_type = pull_request.target_repo.repo_type
1978 1978 if repo_type == 'hg':
1979 1979 return self._get_general_setting(
1980 1980 pull_request, 'rhodecode_hg_close_branch_before_merging')
1981 1981 elif repo_type == 'git':
1982 1982 return self._get_general_setting(
1983 1983 pull_request, 'rhodecode_git_close_branch_before_merging')
1984 1984
1985 1985 return False
1986 1986
1987 1987 def _get_general_setting(self, pull_request, settings_key, default=False):
1988 1988 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1989 1989 settings = settings_model.get_general_settings()
1990 1990 return settings.get(settings_key, default)
1991 1991
1992 1992 def _log_audit_action(self, action, action_data, user, pull_request):
1993 1993 audit_logger.store(
1994 1994 action=action,
1995 1995 action_data=action_data,
1996 1996 user=user,
1997 1997 repo=pull_request.target_repo)
1998 1998
1999 1999 def get_reviewer_functions(self):
2000 2000 """
2001 2001 Fetches functions for validation and fetching default reviewers.
2002 2002 If available we use the EE package, else we fallback to CE
2003 2003 package functions
2004 2004 """
2005 2005 try:
2006 2006 from rc_reviewers.utils import get_default_reviewers_data
2007 2007 from rc_reviewers.utils import validate_default_reviewers
2008 2008 from rc_reviewers.utils import validate_observers
2009 2009 except ImportError:
2010 2010 from rhodecode.apps.repository.utils import get_default_reviewers_data
2011 2011 from rhodecode.apps.repository.utils import validate_default_reviewers
2012 2012 from rhodecode.apps.repository.utils import validate_observers
2013 2013
2014 2014 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2015 2015
2016 2016
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys under which individual check failures are stored in error_details
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """Mark the check as failed and record one error entry."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for *pull_request* as *auth_user* and return the
        populated MergeCheck. With ``fail_early`` the first failing check
        short-circuits and returns immediately.
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether either side moved since the PR refs were stored
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # refs can legitimately be gone (e.g. deleted branch); the
            # source/target_commit AttributeDicts then stay empty
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """Return user-facing descriptions of how the merge will be performed."""
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2199 2199
2200 2200
# commit-count summary of a PR update: added/common/removed vs total commits
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# per-file summary of a PR update: lists of added/modified/removed file paths
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1184 +1,1188 b''
1 1 // # Copyright (C) 2010-2020 RhodeCode GmbH
2 2 // #
3 3 // # This program is free software: you can redistribute it and/or modify
4 4 // # it under the terms of the GNU Affero General Public License, version 3
5 5 // # (only), as published by the Free Software Foundation.
6 6 // #
7 7 // # This program is distributed in the hope that it will be useful,
8 8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 // # GNU General Public License for more details.
11 11 // #
12 12 // # You should have received a copy of the GNU Affero General Public License
13 13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 // #
15 15 // # This program is dual-licensed. If you wish to learn more about the
16 16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19
var prButtonLockChecks = {
    'compare': false,
    'reviewers': false
};

/**
 * Lock/unlock the "submit pull request" button while async checks run.
 * E.g. reviewer calculation must finish before a PR can be submitted.
 * @param lockEnabled - true to lock; false marks the given scope as ready
 * @param msg - optional HTML message rendered into #pr_open_message
 * @param scope - 'compare', 'reviewers' or 'all' (default)
 */
var prButtonLock = function (lockEnabled, msg, scope) {
    scope = scope || 'all';

    var scopeReady = !lockEnabled;
    if (scope === 'all') {
        prButtonLockChecks['compare'] = scopeReady;
        prButtonLockChecks['reviewers'] = scopeReady;
    } else if (scope === 'compare') {
        prButtonLockChecks['compare'] = scopeReady;
    } else if (scope === 'reviewers') {
        prButtonLockChecks['reviewers'] = scopeReady;
    }

    if (lockEnabled) {
        $('#pr_submit').attr('disabled', 'disabled');
    } else if (prButtonLockChecks.compare && prButtonLockChecks.reviewers) {
        // only unlock once every scope reported ready
        $('#pr_submit').removeAttr('disabled');
    }

    if (msg) {
        $('#pr_open_message').html(msg);
    }
};
54 54
55 55
/**
 Generate Title and Description for a PullRequest.
 With a single commit, that commit's first message line becomes the title
 and its message the description. With multiple commits the description is
 a bullet list of up to `limit` newest-first commit subjects, and the title
 is derived from the source reference name ('-'/'_' turned into spaces).

 * @param sourceRefType
 * @param sourceRef
 * @param elements - commit objects carrying a 'message' key
 * @param limit - max number of commits listed in the description
 * @returns {*[]} [title, description]
 */
var getTitleAndDescription = function (sourceRefType, sourceRef, elements, limit) {
    var desc = '';

    // newest commits first, capped at `limit`
    $.each($(elements).get().reverse().slice(0, limit), function (idx, commit) {
        desc += '- ' + commit['message'].split('\n')[0].replace(/\n+$/, "") + '\n';
    });

    var title;
    if (elements.length === 1) {
        // only 1 commit, use commit message as title
        title = elements[0]['message'].split('\n')[0];
    } else {
        // use reference name, prettified
        var prettyRef = sourceRef.replace(/-/g, ' ').replace(/_/g, ' ').capitalizeFirstLetter()
        title = 'Changes from {0}: {1}'.format(sourceRefType, prettyRef);
    }

    return [title, desc]
};
95 95
96 96
97 97 window.ReviewersController = function () {
98 98 var self = this;
99 99 this.$loadingIndicator = $('.calculate-reviewers');
100 100 this.$reviewRulesContainer = $('#review_rules');
101 101 this.$rulesList = this.$reviewRulesContainer.find('.pr-reviewer-rules');
102 102 this.$userRule = $('.pr-user-rule-container');
103 103 this.$reviewMembers = $('#review_members');
104 104 this.$observerMembers = $('#observer_members');
105 105
106 106 this.currentRequest = null;
107 107 this.diffData = null;
108 108 this.enabledRules = [];
109 109 // sync with db.py entries
110 110 this.ROLE_REVIEWER = 'reviewer';
111 111 this.ROLE_OBSERVER = 'observer'
112 112
113 113 //dummy handler, we might register our own later
114 114 this.diffDataHandler = function (data) {};
115 115
116 116 this.defaultForbidUsers = function () {
117 117 return [
118 118 {
119 119 'username': 'default',
120 120 'user_id': templateContext.default_user.user_id
121 121 }
122 122 ];
123 123 };
124 124
125 125 // init default forbidden users
126 126 this.forbidUsers = this.defaultForbidUsers();
127 127
128 128 this.hideReviewRules = function () {
129 129 self.$reviewRulesContainer.hide();
130 130 $(self.$userRule.selector).hide();
131 131 };
132 132
133 133 this.showReviewRules = function () {
134 134 self.$reviewRulesContainer.show();
135 135 $(self.$userRule.selector).show();
136 136 };
137 137
138 138 this.addRule = function (ruleText) {
139 139 self.showReviewRules();
140 140 self.enabledRules.push(ruleText);
141 141 return '<div>- {0}</div>'.format(ruleText)
142 142 };
143 143
144 144 this.increaseCounter = function(role) {
145 145 if (role === self.ROLE_REVIEWER) {
146 146 var $elem = $('#reviewers-cnt')
147 147 var cnt = parseInt($elem.data('count') || 0)
148 148 cnt +=1
149 149 $elem.html(cnt);
150 150 $elem.data('count', cnt);
151 151 }
152 152 else if (role === self.ROLE_OBSERVER) {
153 153 var $elem = $('#observers-cnt');
154 154 var cnt = parseInt($elem.data('count') || 0)
155 155 cnt +=1
156 156 $elem.html(cnt);
157 157 $elem.data('count', cnt);
158 158 }
159 159 }
160 160
161 161 this.resetCounter = function () {
162 162 var $elem = $('#reviewers-cnt');
163 163
164 164 $elem.data('count', 0);
165 165 $elem.html(0);
166 166
167 167 var $elem = $('#observers-cnt');
168 168
169 169 $elem.data('count', 0);
170 170 $elem.html(0);
171 171 }
172 172
173 173 this.loadReviewRules = function (data) {
174 174 self.diffData = data;
175 175
176 176 // reset forbidden Users
177 177 this.forbidUsers = self.defaultForbidUsers();
178 178
179 179 // reset state of review rules
180 180 self.$rulesList.html('');
181 181
182 182 if (!data || data.rules === undefined || $.isEmptyObject(data.rules)) {
183 183 // default rule, case for older repo that don't have any rules stored
184 184 self.$rulesList.append(
185 185 self.addRule(
186 186 _gettext('All reviewers must vote.'))
187 187 );
188 188 return self.forbidUsers
189 189 }
190 190
191 191 if (data.rules.voting !== undefined) {
192 192 if (data.rules.voting < 0) {
193 193 self.$rulesList.append(
194 194 self.addRule(
195 195 _gettext('All individual reviewers must vote.'))
196 196 )
197 197 } else if (data.rules.voting === 1) {
198 198 self.$rulesList.append(
199 199 self.addRule(
200 200 _gettext('At least {0} reviewer must vote.').format(data.rules.voting))
201 201 )
202 202
203 203 } else {
204 204 self.$rulesList.append(
205 205 self.addRule(
206 206 _gettext('At least {0} reviewers must vote.').format(data.rules.voting))
207 207 )
208 208 }
209 209 }
210 210
211 211 if (data.rules.voting_groups !== undefined) {
212 212 $.each(data.rules.voting_groups, function (index, rule_data) {
213 213 self.$rulesList.append(
214 214 self.addRule(rule_data.text)
215 215 )
216 216 });
217 217 }
218 218
219 219 if (data.rules.use_code_authors_for_review) {
220 220 self.$rulesList.append(
221 221 self.addRule(
222 222 _gettext('Reviewers picked from source code changes.'))
223 223 )
224 224 }
225 225
226 226 if (data.rules.forbid_adding_reviewers) {
227 227 $('#add_reviewer_input').remove();
228 228 self.$rulesList.append(
229 229 self.addRule(
230 230 _gettext('Adding new reviewers is forbidden.'))
231 231 )
232 232 }
233 233
234 234 if (data.rules.forbid_author_to_review) {
235 235 self.forbidUsers.push(data.rules_data.pr_author);
236 236 self.$rulesList.append(
237 237 self.addRule(
238 238 _gettext('Author is not allowed to be a reviewer.'))
239 239 )
240 240 }
241 241
242 242 if (data.rules.forbid_commit_author_to_review) {
243 243
244 244 if (data.rules_data.forbidden_users) {
245 245 $.each(data.rules_data.forbidden_users, function (index, member_data) {
246 246 self.forbidUsers.push(member_data)
247 247 });
248 248 }
249 249
250 250 self.$rulesList.append(
251 251 self.addRule(
252 252 _gettext('Commit Authors are not allowed to be a reviewer.'))
253 253 )
254 254 }
255 255
256 256 // we don't have any rules set, so we inform users about it
257 257 if (self.enabledRules.length === 0) {
258 258 self.addRule(
259 259 _gettext('No review rules set.'))
260 260 }
261 261
262 262 return self.forbidUsers
263 263 };
264 264
265 265 this.emptyTables = function () {
266 266 self.emptyReviewersTable();
267 267 self.emptyObserversTable();
268 268
269 269 // Also reset counters.
270 270 self.resetCounter();
271 271 }
272 272
273 273 this.emptyReviewersTable = function (withText) {
274 274 self.$reviewMembers.empty();
275 275 if (withText !== undefined) {
276 276 self.$reviewMembers.html(withText)
277 277 }
278 278 };
279 279
280 280 this.emptyObserversTable = function (withText) {
281 281 self.$observerMembers.empty();
282 282 if (withText !== undefined) {
283 283 self.$observerMembers.html(withText)
284 284 }
285 285 }
286 286
287 287 this.loadDefaultReviewers = function (sourceRepo, sourceRef, targetRepo, targetRef) {
288 288
289 289 if (self.currentRequest) {
290 290 // make sure we cleanup old running requests before triggering this again
291 291 self.currentRequest.abort();
292 292 }
293 293
294 294 self.$loadingIndicator.show();
295 295
296 296 // reset reviewer/observe members
297 297 self.emptyTables();
298 298
299 299 prButtonLock(true, null, 'reviewers');
300 300 $('#user').hide(); // hide user autocomplete before load
301 301 $('#observer').hide(); //hide observer autocomplete before load
302 302
303 303 // lock PR button, so we cannot send PR before it's calculated
304 304 prButtonLock(true, _gettext('Loading diff ...'), 'compare');
305 305
306 306 if (sourceRef.length !== 3 || targetRef.length !== 3) {
307 307 // don't load defaults in case we're missing some refs...
308 308 self.$loadingIndicator.hide();
309 309 return
310 310 }
311 311
312 312 var url = pyroutes.url('repo_default_reviewers_data',
313 313 {
314 314 'repo_name': templateContext.repo_name,
315 315 'source_repo': sourceRepo,
316 'source_ref_type': sourceRef[0],
317 'source_ref_name': sourceRef[1],
316 318 'source_ref': sourceRef[2],
317 319 'target_repo': targetRepo,
318 'target_ref': targetRef[2]
320 'target_ref': targetRef[2],
321 'target_ref_type': sourceRef[0],
322 'target_ref_name': sourceRef[1]
319 323 });
320 324
321 325 self.currentRequest = $.ajax({
322 326 url: url,
323 327 headers: {'X-PARTIAL-XHR': true},
324 328 type: 'GET',
325 329 success: function (data) {
326 330
327 331 self.currentRequest = null;
328 332
329 333 // review rules
330 334 self.loadReviewRules(data);
331 335 self.handleDiffData(data["diff_info"]);
332 336
333 337 for (var i = 0; i < data.reviewers.length; i++) {
334 338 var reviewer = data.reviewers[i];
335 339 // load reviewer rules from the repo data
336 340 self.addMember(reviewer, reviewer.reasons, reviewer.mandatory, reviewer.role);
337 341 }
338 342
339 343
340 344 self.$loadingIndicator.hide();
341 345 prButtonLock(false, null, 'reviewers');
342 346
343 347 $('#user').show(); // show user autocomplete before load
344 348 $('#observer').show(); // show observer autocomplete before load
345 349
346 350 var commitElements = data["diff_info"]['commits'];
347 351
348 352 if (commitElements.length === 0) {
349 353 var noCommitsMsg = '<span class="alert-text-warning">{0}</span>'.format(
350 354 _gettext('There are no commits to merge.'));
351 355 prButtonLock(true, noCommitsMsg, 'all');
352 356
353 357 } else {
354 358 // un-lock PR button, so we cannot send PR before it's calculated
355 359 prButtonLock(false, null, 'compare');
356 360 }
357 361
358 362 },
359 363 error: function (jqXHR, textStatus, errorThrown) {
360 364 var prefix = "Loading diff and reviewers/observers failed\n"
361 365 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
362 366 ajaxErrorSwal(message);
363 367 }
364 368 });
365 369
366 370 };
367 371
368 372 // check those, refactor
369 373 this.removeMember = function (reviewer_id, mark_delete) {
370 374 var reviewer = $('#reviewer_{0}'.format(reviewer_id));
371 375
372 376 if (typeof (mark_delete) === undefined) {
373 377 mark_delete = false;
374 378 }
375 379
376 380 if (mark_delete === true) {
377 381 if (reviewer) {
378 382 // now delete the input
379 383 $('#reviewer_{0} input'.format(reviewer_id)).remove();
380 384 $('#reviewer_{0}_rules input'.format(reviewer_id)).remove();
381 385 // mark as to-delete
382 386 var obj = $('#reviewer_{0}_name'.format(reviewer_id));
383 387 obj.addClass('to-delete');
384 388 obj.css({"text-decoration": "line-through", "opacity": 0.5});
385 389 }
386 390 } else {
387 391 $('#reviewer_{0}'.format(reviewer_id)).remove();
388 392 }
389 393 };
390 394
/**
 * Add a member row to the reviewers or observers table.
 *
 * @param reviewer_obj - object carrying at least `user_id` and `username`,
 *        plus the fields consumed by the `reviewMemberEntry` template
 *        (gravatar_link, user_link, user_group, ...)
 * @param reasons - list of strings explaining why the member was added
 * @param mandatory - bool, mark the member as mandatory
 * @param role - self.ROLE_REVIEWER (default) or self.ROLE_OBSERVER
 */
this.addMember = function (reviewer_obj, reasons, mandatory, role) {

    var id = reviewer_obj.user_id;
    var username = reviewer_obj.username;

    reasons = reasons || [];
    mandatory = mandatory || false;
    role = role || self.ROLE_REVIEWER

    // register current set IDS to check if we don't have this ID already in
    // and prevent duplicates
    var currentIds = [];

    $.each($('.reviewer_entry'), function (index, value) {
        currentIds.push($(value).data('reviewerUserId'))
    })

    // users present in self.forbidUsers may not be added at all
    var userAllowedReview = function (userId) {
        var allowed = true;
        $.each(self.forbidUsers, function (index, member_data) {
            if (parseInt(userId) === member_data['user_id']) {
                allowed = false;
                return false // breaks the loop
            }
        });
        return allowed
    };

    var userAllowed = userAllowedReview(id);

    if (!userAllowed) {
        alert(_gettext('User `{0}` not allowed to be a reviewer').format(username));
    } else {
        // only add if it's not there
        var alreadyReviewer = currentIds.indexOf(id) != -1;

        if (alreadyReviewer) {
            alert(_gettext('User `{0}` already in reviewers/observers').format(username));
        } else {

            // render a fresh, editable entry in "not reviewed" state
            var reviewerEntry = renderTemplate('reviewMemberEntry', {
                'member': reviewer_obj,
                'mandatory': mandatory,
                'role': role,
                'reasons': reasons,
                'allowed_to_update': true,
                'review_status': 'not_reviewed',
                'review_status_label': _gettext('Not Reviewed'),
                'user_group': reviewer_obj.user_group,
                'create': true,
                'rule_show': true,
            })

            // append to the table matching the role, bump its counter and
            // drop the "empty" placeholder row if one is shown
            if (role === self.ROLE_REVIEWER) {
                $(self.$reviewMembers.selector).append(reviewerEntry);
                self.increaseCounter(self.ROLE_REVIEWER);
                $('#reviewer-empty-msg').remove()
            }
            else if (role === self.ROLE_OBSERVER) {
                $(self.$observerMembers.selector).append(reviewerEntry);
                self.increaseCounter(self.ROLE_OBSERVER);
                $('#observer-empty-msg').remove();
            }

            tooltipActivate();
        }
    }

};
460 464
/**
 * Serialize the edit form for the given role and POST it as a pull-request
 * update. Roles other than 'reviewer'/'observer' are ignored.
 */
this.updateReviewers = function (repo_name, pull_request_id, role) {
    var inputsByRole = {
        'reviewer': '#reviewers input',
        'observer': '#observers input'
    };
    var selector = inputsByRole[role];
    if (selector !== undefined) {
        _updatePullRequest(repo_name, pull_request_id, $(selector).serialize());
    }
};
470 474
// Delegate incoming diff data to the handler injected into this controller.
this.handleDiffData = function (data) {
    self.diffDataHandler(data)
}
474 478 };
475 479
476 480
/**
 * POST `postData` to the pull-request update endpoint, attaching the CSRF
 * token, then follow the server-provided redirect (or reload in place).
 */
var _updatePullRequest = function (repo_name, pull_request_id, postData) {
    var endpoint = pyroutes.url('pullrequest_update', {
        "repo_name": repo_name,
        "pull_request_id": pull_request_id
    });

    // postData may arrive either as an already-serialized query string or as
    // a plain object; inject the CSRF token in the matching shape.
    if (typeof postData === 'string') {
        postData = postData + '&csrf_token=' + CSRF_TOKEN;
    } else {
        postData.csrf_token = CSRF_TOKEN;
    }

    var onSuccess = function (response) {
        var target = response['redirect_url'];
        if (target === undefined || target === null || target === '') {
            window.location.reload();
        } else {
            window.location = target;
        }
    };

    ajaxPOST(endpoint, postData, onSuccess);
};
498 502
/**
 * PULL REQUEST update commits
 *
 * Ask the server to refresh the commits of a pull request; passing
 * `force === true` additionally requests a forced refresh.
 */
var updateCommits = function (repo_name, pull_request_id, force) {
    var payload = {'update_commits': true};
    if (force === true) {
        payload['force_refresh'] = true
    }
    _updatePullRequest(repo_name, pull_request_id, payload);
};
511 515
512 516
/**
 * PULL REQUEST edit info
 *
 * Persist a new title/description (rendered with `renderer`) for the pull
 * request, then reload the page on success.
 */
var editPullRequest = function (repo_name, pull_request_id, title, description, renderer) {
    var endpoint = pyroutes.url('pullrequest_update', {
        "repo_name": repo_name,
        "pull_request_id": pull_request_id
    });

    var payload = {
        'title': title,
        'description': description,
        'description_renderer': renderer,
        'edit_pull_request': true,
        'csrf_token': CSRF_TOKEN
    };

    ajaxPOST(endpoint, payload, function (o) {
        window.location.reload();
    });
};
533 537
534 538
/**
 * autocomplete handler for reviewers/observers
 *
 * Returns an onSelect callback for the autocomplete widget bound to
 * `inputId`. A selected user — or every member of a selected user group —
 * is added to `controller` with the given `role`, then the input is cleared.
 */
var autoCompleteHandler = function (inputId, controller, role) {

    return function (element, data) {
        var mandatory = false;
        var reasons = [_gettext('added manually by "{0}"').format(
            templateContext.rhodecode_user.username)];

        // add whole user groups
        if (data.value_type == 'user_group') {
            reasons.push(_gettext('member of "{0}"').format(data.value_display));

            $.each(data.members, function (index, member_data) {
                // NOTE: mutates the suggestion payload in place, remapping the
                // autocomplete field names onto what addMember() expects.
                var reviewer = member_data;
                reviewer['user_id'] = member_data['id'];
                reviewer['gravatar_link'] = member_data['icon_link'];
                reviewer['user_link'] = member_data['profile_link'];
                reviewer['rules'] = [];
                controller.addMember(reviewer, reasons, mandatory, role);
            })
        }
        // add single user
        else {
            var reviewer = data;
            reviewer['user_id'] = data['id'];
            reviewer['gravatar_link'] = data['icon_link'];
            reviewer['user_link'] = data['profile_link'];
            reviewer['rules'] = [];
            controller.addMember(reviewer, reasons, mandatory, role);
        }

        // clear the autocomplete input once the selection was handled
        $(inputId).val('');
    }
}
571 575
/**
 * Reviewer autocomplete
 *
 * Wires the autocomplete widget on `inputId` so that selecting a suggestion
 * adds the user (or user-group members) as a *reviewer* via `controller`.
 */
var ReviewerAutoComplete = function (inputId, controller) {
    var self = this;
    self.controller = controller;
    self.inputId = inputId;
    var onSelect = autoCompleteHandler(
        inputId, controller, controller.ROLE_REVIEWER);

    var queryParams = {
        user_id: templateContext.rhodecode_user.user_id,
        user_groups: true,
        user_groups_expand: true,
        skip_default_user: true
    };

    $(inputId).autocomplete({
        serviceUrl: pyroutes.url('user_autocomplete_data'),
        params: queryParams,
        minChars: 2,
        maxHeight: 400,
        deferRequestBy: 300, // milliseconds
        showNoSuggestionNotice: true,
        tabDisabled: true,
        autoSelectFirst: true,
        formatResult: autocompleteFormatResult,
        lookupFilter: autocompleteFilterResult,
        onSelect: onSelect
    });
};
600 604
/**
 * Observers autocomplete
 *
 * Wires the autocomplete widget on `inputId` so that selecting a suggestion
 * adds the user (or user-group members) as an *observer* via `controller`.
 */
var ObserverAutoComplete = function (inputId, controller) {
    var self = this;
    self.controller = controller;
    self.inputId = inputId;
    var onSelect = autoCompleteHandler(
        inputId, controller, controller.ROLE_OBSERVER);

    var queryParams = {
        user_id: templateContext.rhodecode_user.user_id,
        user_groups: true,
        user_groups_expand: true,
        skip_default_user: true
    };

    $(inputId).autocomplete({
        serviceUrl: pyroutes.url('user_autocomplete_data'),
        params: queryParams,
        minChars: 2,
        maxHeight: 400,
        deferRequestBy: 300, // milliseconds
        showNoSuggestionNotice: true,
        tabDisabled: true,
        autoSelectFirst: true,
        formatResult: autocompleteFormatResult,
        lookupFilter: autocompleteFilterResult,
        onSelect: onSelect
    });
}
629 633
630 634
/**
 * Controls the "compare versions" radio selectors on a pull request page:
 * keeps the source/target version choices mutually consistent and drives
 * the "show version diff" button state and label.
 */
window.VersionController = function () {
    var self = this;
    this.$verSource = $('input[name=ver_source]');
    this.$verTarget = $('input[name=ver_target]');
    this.$showVersionDiff = $('#show-version-diff');

    // After a selection changed, disable every source radio that is newer
    // than the selected target and refresh the diff button state.
    this.adjustRadioSelectors = function (curNode) {
        // map a radio value to a comparable number; 'latest' sorts last
        var getVal = function (item) {
            if (item === 'latest') {
                return Number.MAX_SAFE_INTEGER
            }
            else {
                return parseInt(item)
            }
        };

        var curVal = getVal($(curNode).val());
        var cleared = false;

        $.each(self.$verSource, function (index, value) {
            var elVal = getVal($(value).val());

            if (elVal > curVal) {
                // source newer than target: disable and grey it out
                if ($(value).is(':checked')) {
                    cleared = true;
                }
                $(value).attr('disabled', 'disabled');
                $(value).removeAttr('checked');
                $(value).css({'opacity': 0.1});
            }
            else {
                $(value).css({'opacity': 1});
                $(value).removeAttr('disabled');
            }
        });

        if (cleared) {
            // if we unchecked an active, set the next one to same loc.
            $(this.$verSource).filter('[value={0}]'.format(
                curVal)).attr('checked', 'checked');
        }

        self.setLockAction(false,
            $(curNode).data('verPos'),
            $(this.$verSource).filter(':checked').data('verPos')
        );
    };


    // re-run the adjustment whenever either radio group changes
    this.attachVersionListener = function () {
        self.$verTarget.change(function (e) {
            self.adjustRadioSelectors(this)
        });
        self.$verSource.change(function (e) {
            self.adjustRadioSelectors(self.$verTarget.filter(':checked'))
        });
    };

    // Initial sync: adjust to the pre-checked target, lock the diff button,
    // then start listening for selection changes.
    this.init = function () {

        var curNode = self.$verTarget.filter(':checked');
        self.adjustRadioSelectors(curNode);
        self.setLockAction(true);
        self.attachVersionListener();

    };

    // Enable/disable the "show version diff" button; when enabled, choose
    // the label depending on whether two different versions are selected.
    this.setLockAction = function (state, selectedVersion, otherVersion) {
        var $showVersionDiff = this.$showVersionDiff;

        if (state) {
            $showVersionDiff.attr('disabled', 'disabled');
            $showVersionDiff.addClass('disabled');
            $showVersionDiff.html($showVersionDiff.data('labelTextLocked'));
        }
        else {
            $showVersionDiff.removeAttr('disabled');
            $showVersionDiff.removeClass('disabled');

            if (selectedVersion == otherVersion) {
                $showVersionDiff.html($showVersionDiff.data('labelTextShow'));
            } else {
                $showVersionDiff.html($showVersionDiff.data('labelTextDiff'));
            }
        }

    };

    // Navigate to the pull request page showing the selected version range.
    this.showVersionDiff = function () {
        var target = self.$verTarget.filter(':checked');
        var source = self.$verSource.filter(':checked');

        if (target.val() && source.val()) {
            var params = {
                'pull_request_id': templateContext.pull_request_data.pull_request_id,
                'repo_name': templateContext.repo_name,
                'version': target.val(),
                'from_version': source.val()
            };
            window.location = pyroutes.url('pullrequest_show', params)
        }

        return false;
    };

    // Toggle visibility of the version comparison widget; `elem` is the
    // toggle link whose label is swapped via its data attributes.
    this.toggleVersionView = function (elem) {

        if (this.$showVersionDiff.is(':visible')) {
            $('.version-pr').hide();
            this.$showVersionDiff.hide();
            $(elem).html($(elem).data('toggleOn'))
        } else {
            $('.version-pr').show();
            this.$showVersionDiff.show();
            $(elem).html($(elem).data('toggleOff'))
        }

        return false
    };

};
752 756
753 757
/**
 * Controls the "update commits" buttons on a pull request page, making sure
 * only one update request is in flight at a time.
 */
window.UpdatePrController = function () {
    var self = this;
    this.$updateCommits = $('#update_commits');
    this.$updateCommitsSwitcher = $('#update_commits_switcher');

    // Disable both buttons and show `label` on the main button while an
    // update request is running.
    this.lockUpdateButton = function (label) {
        $.each([self.$updateCommits, self.$updateCommitsSwitcher],
               function (idx, $btn) {
            $btn.attr('disabled', 'disabled');
            $btn.addClass('disabled');
            $btn.removeClass('btn-primary');
        });
        self.$updateCommits.text(_gettext(label));
    };

    this.isUpdateLocked = function () {
        return self.$updateCommits.attr('disabled') !== undefined;
    };

    // Regular update: no-op when an update is already running.
    this.updateCommits = function (curNode) {
        if (self.isUpdateLocked()) {
            return
        }
        self.lockUpdateButton(_gettext('Updating...'));
        updateCommits(
            templateContext.repo_name,
            templateContext.pull_request_data.pull_request_id);
    };

    // Forced update: same guard, but requests a forced refresh server-side.
    this.forceUpdateCommits = function () {
        if (self.isUpdateLocked()) {
            return
        }
        self.lockUpdateButton(_gettext('Force updating...'));
        updateCommits(
            templateContext.repo_name,
            templateContext.pull_request_data.pull_request_id, true);
    };
};
797 801
798 802
/**
 * Reviewer display panel
 *
 * Static controller for the reviewers sidebar box: toggles between the
 * read-only view and edit mode, and (re)renders the reviewer rows from the
 * server-provided `setReviewers` payload.
 */
window.ReviewersPanel = {
    editButton: null,      // opens edit mode
    closeButton: null,     // leaves edit mode
    addButton: null,       // "add reviewer" trigger, visible in edit mode only
    removeButtons: null,   // per-row remove controls
    reviewRules: null,     // default reviewer rules, shown while editing
    setReviewers: null,    // payload with a `reviewers` list used for rendering
    controller: null,      // shared reviewers controller instance

    setSelectors: function () {
        var self = this;
        self.editButton = $('#open_edit_reviewers');
        self.closeButton =$('#close_edit_reviewers');
        self.addButton = $('#add_reviewer');
        self.removeButtons = $('.reviewer_member_remove,.reviewer_member_mandatory_remove');
    },

    init: function (controller, reviewRules, setReviewers) {
        var self = this;
        self.setSelectors();

        self.controller = controller;
        self.reviewRules = reviewRules;
        self.setReviewers = setReviewers;

        self.editButton.on('click', function (e) {
            self.edit();
        });
        // leaving edit mode re-renders, discarding unsaved row changes
        self.closeButton.on('click', function (e) {
            self.close();
            self.renderReviewers();
        });

        self.renderReviewers();

    },

    renderReviewers: function () {
        var self = this;

        if (self.setReviewers.reviewers === undefined) {
            return
        }
        if (self.setReviewers.reviewers.length === 0) {
            self.controller.emptyReviewersTable('<tr id="reviewer-empty-msg"><td colspan="6">No reviewers</td></tr>');
            return
        }

        self.controller.emptyReviewersTable();

        // render one row per member that actually holds the reviewer role
        $.each(self.setReviewers.reviewers, function (key, val) {

            var member = val;
            if (member.role === self.controller.ROLE_REVIEWER) {
                var entry = renderTemplate('reviewMemberEntry', {
                    'member': member,
                    'mandatory': member.mandatory,
                    'role': member.role,
                    'reasons': member.reasons,
                    'allowed_to_update': member.allowed_to_update,
                    'review_status': member.review_status,
                    'review_status_label': member.review_status_label,
                    'user_group': member.user_group,
                    'create': false
                });

                $(self.controller.$reviewMembers.selector).append(entry)
            }
        });

        tooltipActivate();
    },

    edit: function (event) {
        var self = this;
        self.editButton.hide();
        self.closeButton.show();
        self.addButton.show();
        $(self.removeButtons.selector).css('visibility', 'visible');
        // review rules
        self.controller.loadReviewRules(this.reviewRules);
    },

    close: function (event) {
        var self = this;
        this.editButton.show();
        this.closeButton.hide();
        this.addButton.hide();
        $(this.removeButtons.selector).css('visibility', 'hidden');
        // hide review rules
        self.controller.hideReviewRules();
    }
};
895 899
/**
 * Reviewer display panel
 *
 * Static controller for the observers sidebar box: toggles between the
 * read-only view and edit mode, and (re)renders the observer rows from the
 * server-provided `setReviewers` payload.
 */
window.ObserversPanel = {
    editButton: null,      // opens edit mode
    closeButton: null,     // leaves edit mode
    addButton: null,       // "add observer" trigger, visible in edit mode only
    removeButtons: null,   // per-row remove controls
    reviewRules: null,     // default reviewer rules
    setReviewers: null,    // payload with an `observers` list used for rendering
    controller: null,      // shared reviewers controller instance

    setSelectors: function () {
        var self = this;
        self.editButton = $('#open_edit_observers');
        self.closeButton =$('#close_edit_observers');
        self.addButton = $('#add_observer');
        self.removeButtons = $('.observer_member_remove,.observer_member_mandatory_remove');
    },

    init: function (controller, reviewRules, setReviewers) {
        var self = this;
        self.setSelectors();

        self.controller = controller;
        self.reviewRules = reviewRules;
        self.setReviewers = setReviewers;

        self.editButton.on('click', function (e) {
            self.edit();
        });
        // leaving edit mode re-renders, discarding unsaved row changes
        self.closeButton.on('click', function (e) {
            self.close();
            self.renderObservers();
        });

        self.renderObservers();

    },

    renderObservers: function () {
        var self = this;
        if (self.setReviewers.observers === undefined) {
            return
        }
        if (self.setReviewers.observers.length === 0) {
            self.controller.emptyObserversTable('<tr id="observer-empty-msg"><td colspan="6">No observers</td></tr>');
            return
        }

        self.controller.emptyObserversTable();

        // render one row per member that actually holds the observer role
        $.each(self.setReviewers.observers, function (key, val) {
            var member = val;
            if (member.role === self.controller.ROLE_OBSERVER) {
                var entry = renderTemplate('reviewMemberEntry', {
                    'member': member,
                    'mandatory': member.mandatory,
                    'role': member.role,
                    'reasons': member.reasons,
                    'allowed_to_update': member.allowed_to_update,
                    'review_status': member.review_status,
                    'review_status_label': member.review_status_label,
                    'user_group': member.user_group,
                    'create': false
                });

                $(self.controller.$observerMembers.selector).append(entry)
            }
        });

        tooltipActivate();
    },

    edit: function (event) {
        this.editButton.hide();
        this.closeButton.show();
        this.addButton.show();
        $(this.removeButtons.selector).css('visibility', 'visible');
    },

    close: function (event) {
        this.editButton.show();
        this.closeButton.hide();
        this.addButton.hide();
        $(this.removeButtons.selector).css('visibility', 'hidden');
    }

};
985 989
/**
 * Edit/view toggling for the pull request title & description panel.
 */
window.PRDetails = {
    editButton: null,
    closeButton: null,
    deleteButton: null,
    viewFields: null,
    editFields: null,

    setSelectors: function () {
        this.editButton = $('#open_edit_pullrequest');
        this.closeButton = $('#close_edit_pullrequest');
        this.deleteButton = $('#delete_pullrequest');
        this.viewFields = $('#pr-desc, #pr-title');
        this.editFields = $('#pr-desc-edit, #pr-title-edit, .pr-save');
    },

    init: function () {
        var panel = this;
        panel.setSelectors();
        panel.editButton.on('click', function (e) {
            panel.edit();
        });
        panel.closeButton.on('click', function (e) {
            panel.view();
        });
    },

    edit: function (event) {
        var codeMirror = $('#pr-description-input').get(0).MarkupForm.cm;
        this.viewFields.hide();
        this.editButton.hide();
        this.deleteButton.hide();
        this.closeButton.show();
        this.editFields.show();
        // the editor needs a refresh once its container becomes visible
        codeMirror.refresh();
    },

    view: function (event) {
        this.editButton.show();
        this.deleteButton.show();
        this.editFields.hide();
        this.closeButton.hide();
        this.viewFields.show();
    }
};
1031 1035
/**
 * OnLine presence using channelstream
 *
 * Tracks which users are currently present on the given channelstream
 * channel and toggles the presence indicator on matching reviewer rows.
 */
window.ReviewerPresenceController = function (channel) {
    var self = this;
    this.channel = channel;
    this.users = {};  // user id -> latest presence state

    // rebuild the id -> state map from a channelstream users list
    this.storeUsers = function (users) {
        self.users = {}
        $.each(users, function (index, value) {
            var userId = value.state.id;
            self.users[userId] = value.state;
        })
    }

    // show the presence indicator only on rows whose user is in self.users
    this.render = function () {
        $.each($('.reviewer_entry'), function (index, value) {
            var userData = $(value).data();
            if (self.users[userData.reviewerUserId] !== undefined) {
                $(value).find('.presence-state').show();
            } else {
                $(value).find('.presence-state').hide();
            }
        })
    };

    // handle a presence broadcast for our channel
    this.handlePresence = function (data) {
        if (data.type == 'presence' && data.channel === self.channel) {
            this.storeUsers(data.users);
            this.render()
        }
    };

    // handle a full channel-state update (e.g. connect/disconnect)
    this.handleChannelUpdate = function (data) {
        if (data.channel === this.channel) {
            this.storeUsers(data.state.users);
            this.render()
        }

    };

    /* subscribe to the current presence */
    $.Topic('/connection_controller/presence').subscribe(this.handlePresence.bind(this));
    /* subscribe to updates e.g connect/disconnect */
    $.Topic('/connection_controller/channel_update').subscribe(this.handleChannelUpdate.bind(this));

};
1080 1084
/**
 * Re-fetch the rendered comments panel for the current pull request and swap
 * it into the sidebar, animating the comment counter when it changed.
 */
window.refreshComments = function (version) {
    version = version || templateContext.pull_request_data.pull_request_version || '';

    // only pull requests are supported; on commit pages there is nothing to do
    if (templateContext.pull_request_data.pull_request_id === null) {
        return
    }

    var loadUrl = pyroutes.url('pullrequest_comments', {
        'pull_request_id': templateContext.pull_request_data.pull_request_id,
        'repo_name': templateContext.repo_name,
        'version': version,
    });

    // ids of comments already rendered client-side
    var currentIDs = [];
    $.each($('.comment'), function (idx, element) {
        currentIDs.push($(element).data('commentId'));
    });

    var $targetElem = $('.comments-content-table');
    // dim the panel while the refresh request is in flight
    $targetElem.css('opacity', 0.3);

    var success = function (data) {
        var $counterElem = $('#comments-count');
        var newCount = $(data).data('counter');
        if (newCount !== undefined) {
            // fade the counter out, swap the number, fade back in
            $counterElem.animate({'opacity': 0.15}, 200, function () {
                $counterElem.animate({'opacity': 1.00}, 200)
                $counterElem.html(newCount);
            });
        }

        $targetElem.css('opacity', 1);
        $targetElem.html(data);
        tooltipActivate();
    };

    ajaxPOST(loadUrl, {"comments": currentIDs}, success, null, {})

}
1125 1129
/**
 * Re-fetch the rendered TODO panel for the current pull request and swap it
 * into the sidebar, animating the TODO counter when it changed.
 */
window.refreshTODOs = function (version) {
    version = version || templateContext.pull_request_data.pull_request_version || '';
    // Pull request case
    if (templateContext.pull_request_data.pull_request_id !== null) {
        var params = {
            'pull_request_id': templateContext.pull_request_data.pull_request_id,
            'repo_name': templateContext.repo_name,
            'version': version,
        };
        // NOTE(review): this fetches the same 'pullrequest_comments' endpoint
        // as refreshComments(), yet renders the result into the TODOs table
        // below. Presumably a dedicated TODOs endpoint was intended here —
        // confirm against the route map before changing.
        var loadUrl = pyroutes.url('pullrequest_comments', params);
    } // commit case
    else {
        return
    }

    // ids of comments already rendered client-side
    var currentIDs = []
    $.each($('.comment'), function (idx, element) {
        currentIDs.push($(element).data('commentId'));
    });

    var data = {"comments": currentIDs};
    var $targetElem = $('.todos-content-table');
    // dim the panel while the refresh request is in flight
    $targetElem.css('opacity', 0.3);

    var success = function (data) {
        var $counterElem = $('#todos-count')
        var newCount = $(data).data('counter');
        if (newCount !== undefined) {
            // fade the counter out, swap the number, fade back in
            var callback = function () {
                $counterElem.animate({'opacity': 1.00}, 200)
                $counterElem.html(newCount);
            };
            $counterElem.animate({'opacity': 0.15}, 200, callback);
        }

        $targetElem.css('opacity', 1);
        $targetElem.html(data);
        tooltipActivate();
    }

    ajaxPOST(loadUrl, data, success, null, {})

}
1169 1173
/**
 * Refresh both the comments and the TODOs sidebar panels for the given pull
 * request version (defaults to the currently shown version).
 */
window.refreshAllComments = function (version) {
    var effectiveVersion = version || templateContext.pull_request_data.pull_request_version || '';
    refreshComments(effectiveVersion);
    refreshTODOs(effectiveVersion);
};
1176 1180
/**
 * Render the sidebar hovercard for a single comment from the base64-encoded
 * JSON payload stored on its hovercard element. Returns an error string when
 * the payload is missing.
 */
window.sidebarComment = function (commentId) {
    var encoded = $('#commentHovercard{0}'.format(commentId)).data('commentJsonB64');
    if (encoded) {
        var payload = JSON.parse(atob(encoded));
        return renderTemplate('sideBarCommentHovercard', payload)
    }
    return 'Failed to load comment {0}'.format(commentId)
};
General Comments 0
You need to be logged in to leave comments. Login now