pull-requests: expose commit versions in the pull-request commit list. Fixes #5642
milka - r4615:ca0827b2 default
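This revision stores the result of PullRequestModel().pr_commits_versions(versions) in c.commit_versions inside pull_request_show(), so the commit-list template can show, next to each commit, the pull-request versions that contain it. Below is a minimal sketch of what such a helper could look like, assuming the version objects expose revisions and pull_request_version_id (as the display objects used in this view do); the actual method lives in rhodecode.model.pull_request and may differ.

import collections

def pr_commits_versions(versions):
    # Hypothetical sketch, not the shipped implementation: map each commit id
    # to the pull-request version ids whose revision list contains it.
    commit_versions = collections.defaultdict(list)
    for pr_version in versions:
        # assumption: version objects expose .revisions and .pull_request_version_id
        ver_id = pr_version.pull_request_version_id
        for commit_id in pr_version.revisions:
            commit_versions[commit_id].append(ver_id)
    return commit_versions

The template can then look up commit_versions[commit.raw_id] for each commit it renders to display the matching version badges.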
@@ -1,1854 +1,1857 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 29
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.db import (
50 50 func, false, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
51 51 PullRequestReviewers)
52 52 from rhodecode.model.forms import PullRequestForm
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
55 55 from rhodecode.model.scm import ScmModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
61 61
62 62 def load_default_context(self):
63 63 c = self._get_local_tmpl_context(include_app_defaults=True)
64 64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
65 65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
66 66 # backward compat.: for OLD PRs we use a plain renderer
67 67 c.renderer = 'plain'
68 68 return c
69 69
70 70 def _get_pull_requests_list(
71 71 self, repo_name, source, filter_type, opened_by, statuses):
72 72
73 73 draw, start, limit = self._extract_chunk(self.request)
74 74 search_q, order_by, order_dir = self._extract_ordering(self.request)
75 75 _render = self.request.get_partial_renderer(
76 76 'rhodecode:templates/data_table/_dt_elements.mako')
77 77
78 78 # pagination
79 79
80 80 if filter_type == 'awaiting_review':
81 81 pull_requests = PullRequestModel().get_awaiting_review(
82 82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
83 83 statuses=statuses, offset=start, length=limit,
84 84 order_by=order_by, order_dir=order_dir)
85 85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
86 86 repo_name, search_q=search_q, source=source, statuses=statuses,
87 87 opened_by=opened_by)
88 88 elif filter_type == 'awaiting_my_review':
89 89 pull_requests = PullRequestModel().get_awaiting_my_review(
90 90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
91 91 user_id=self._rhodecode_user.user_id, statuses=statuses,
92 92 offset=start, length=limit, order_by=order_by,
93 93 order_dir=order_dir)
94 94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
95 95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
96 96 statuses=statuses, opened_by=opened_by)
97 97 else:
98 98 pull_requests = PullRequestModel().get_all(
99 99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
100 100 statuses=statuses, offset=start, length=limit,
101 101 order_by=order_by, order_dir=order_dir)
102 102 pull_requests_total_count = PullRequestModel().count_all(
103 103 repo_name, search_q=search_q, source=source, statuses=statuses,
104 104 opened_by=opened_by)
105 105
106 106 data = []
107 107 comments_model = CommentsModel()
108 108 for pr in pull_requests:
109 109 comments_count = comments_model.get_all_comments(
110 110 self.db_repo.repo_id, pull_request=pr,
111 111 include_drafts=False, count_only=True)
112 112
113 113 data.append({
114 114 'name': _render('pullrequest_name',
115 115 pr.pull_request_id, pr.pull_request_state,
116 116 pr.work_in_progress, pr.target_repo.repo_name,
117 117 short=True),
118 118 'name_raw': pr.pull_request_id,
119 119 'status': _render('pullrequest_status',
120 120 pr.calculated_review_status()),
121 121 'title': _render('pullrequest_title', pr.title, pr.description),
122 122 'description': h.escape(pr.description),
123 123 'updated_on': _render('pullrequest_updated_on',
124 124 h.datetime_to_time(pr.updated_on),
125 125 pr.versions_count),
126 126 'updated_on_raw': h.datetime_to_time(pr.updated_on),
127 127 'created_on': _render('pullrequest_updated_on',
128 128 h.datetime_to_time(pr.created_on)),
129 129 'created_on_raw': h.datetime_to_time(pr.created_on),
130 130 'state': pr.pull_request_state,
131 131 'author': _render('pullrequest_author',
132 132 pr.author.full_contact, ),
133 133 'author_raw': pr.author.full_name,
134 134 'comments': _render('pullrequest_comments', comments_count),
135 135 'comments_raw': comments_count,
136 136 'closed': pr.is_closed(),
137 137 })
138 138
139 139 data = ({
140 140 'draw': draw,
141 141 'data': data,
142 142 'recordsTotal': pull_requests_total_count,
143 143 'recordsFiltered': pull_requests_total_count,
144 144 })
145 145 return data
146 146
147 147 @LoginRequired()
148 148 @HasRepoPermissionAnyDecorator(
149 149 'repository.read', 'repository.write', 'repository.admin')
150 150 def pull_request_list(self):
151 151 c = self.load_default_context()
152 152
153 153 req_get = self.request.GET
154 154 c.source = str2bool(req_get.get('source'))
155 155 c.closed = str2bool(req_get.get('closed'))
156 156 c.my = str2bool(req_get.get('my'))
157 157 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 158 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159 159
160 160 c.active = 'open'
161 161 if c.my:
162 162 c.active = 'my'
163 163 if c.closed:
164 164 c.active = 'closed'
165 165 if c.awaiting_review and not c.source:
166 166 c.active = 'awaiting'
167 167 if c.source and not c.awaiting_review:
168 168 c.active = 'source'
169 169 if c.awaiting_my_review:
170 170 c.active = 'awaiting_my'
171 171
172 172 return self._get_template_context(c)
173 173
174 174 @LoginRequired()
175 175 @HasRepoPermissionAnyDecorator(
176 176 'repository.read', 'repository.write', 'repository.admin')
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 213 ancestor_commit,
214 214 source_ref_id, target_ref_id,
215 215 target_commit, source_commit, diff_limit, file_limit,
216 216 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
217 217
218 218 target_commit_final = target_commit
219 219 source_commit_final = source_commit
220 220
221 221 if use_ancestor:
222 222 # we might not want to use the ancestor for versions
223 223 target_ref_id = ancestor_commit.raw_id
224 224 target_commit_final = ancestor_commit
225 225
226 226 vcs_diff = PullRequestModel().get_diff(
227 227 source_repo, source_ref_id, target_ref_id,
228 228 hide_whitespace_changes, diff_context)
229 229
230 230 diff_processor = diffs.DiffProcessor(
231 231 vcs_diff, format='newdiff', diff_limit=diff_limit,
232 232 file_limit=file_limit, show_full_diff=fulldiff)
233 233
234 234 _parsed = diff_processor.prepare()
235 235
236 236 diffset = codeblocks.DiffSet(
237 237 repo_name=self.db_repo_name,
238 238 source_repo_name=source_repo_name,
239 239 source_node_getter=codeblocks.diffset_node_getter(target_commit_final),
240 240 target_node_getter=codeblocks.diffset_node_getter(source_commit_final),
241 241 )
242 242 diffset = self.path_filter.render_patchset_filtered(
243 243 diffset, _parsed, target_ref_id, source_ref_id)
244 244
245 245 return diffset
246 246
247 247 def _get_range_diffset(self, source_scm, source_repo,
248 248 commit1, commit2, diff_limit, file_limit,
249 249 fulldiff, hide_whitespace_changes, diff_context):
250 250 vcs_diff = source_scm.get_diff(
251 251 commit1, commit2,
252 252 ignore_whitespace=hide_whitespace_changes,
253 253 context=diff_context)
254 254
255 255 diff_processor = diffs.DiffProcessor(
256 256 vcs_diff, format='newdiff', diff_limit=diff_limit,
257 257 file_limit=file_limit, show_full_diff=fulldiff)
258 258
259 259 _parsed = diff_processor.prepare()
260 260
261 261 diffset = codeblocks.DiffSet(
262 262 repo_name=source_repo.repo_name,
263 263 source_node_getter=codeblocks.diffset_node_getter(commit1),
264 264 target_node_getter=codeblocks.diffset_node_getter(commit2))
265 265
266 266 diffset = self.path_filter.render_patchset_filtered(
267 267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
268 268
269 269 return diffset
270 270
271 271 def register_comments_vars(self, c, pull_request, versions, include_drafts=True):
272 272 comments_model = CommentsModel()
273 273
274 274 # GENERAL COMMENTS with versions #
275 275 q = comments_model._all_general_comments_of_pull_request(pull_request)
276 276 q = q.order_by(ChangesetComment.comment_id.asc())
277 277 if not include_drafts:
278 278 q = q.filter(ChangesetComment.draft == false())
279 279 general_comments = q
280 280
281 281 # pick comments we want to render at current version
282 282 c.comment_versions = comments_model.aggregate_comments(
283 283 general_comments, versions, c.at_version_num)
284 284
285 285 # INLINE COMMENTS with versions #
286 286 q = comments_model._all_inline_comments_of_pull_request(pull_request)
287 287 q = q.order_by(ChangesetComment.comment_id.asc())
288 288 if not include_drafts:
289 289 q = q.filter(ChangesetComment.draft == false())
290 290 inline_comments = q
291 291
292 292 c.inline_versions = comments_model.aggregate_comments(
293 293 inline_comments, versions, c.at_version_num, inline=True)
294 294
295 295 # Comments inline+general
296 296 if c.at_version:
297 297 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
298 298 c.comments = c.comment_versions[c.at_version_num]['display']
299 299 else:
300 300 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
301 301 c.comments = c.comment_versions[c.at_version_num]['until']
302 302
303 303 return general_comments, inline_comments
304 304
305 305 @LoginRequired()
306 306 @HasRepoPermissionAnyDecorator(
307 307 'repository.read', 'repository.write', 'repository.admin')
308 308 def pull_request_show(self):
309 309 _ = self.request.translate
310 310 c = self.load_default_context()
311 311
312 312 pull_request = PullRequest.get_or_404(
313 313 self.request.matchdict['pull_request_id'])
314 314 pull_request_id = pull_request.pull_request_id
315 315
316 316 c.state_progressing = pull_request.is_state_changing()
317 317 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
318 318
319 319 _new_state = {
320 320 'created': PullRequest.STATE_CREATED,
321 321 }.get(self.request.GET.get('force_state'))
322 322
323 323 if c.is_super_admin and _new_state:
324 324 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
325 325 h.flash(
326 326 _('Pull Request state was force changed to `{}`').format(_new_state),
327 327 category='success')
328 328 Session().commit()
329 329
330 330 raise HTTPFound(h.route_path(
331 331 'pullrequest_show', repo_name=self.db_repo_name,
332 332 pull_request_id=pull_request_id))
333 333
334 334 version = self.request.GET.get('version')
335 335 from_version = self.request.GET.get('from_version') or version
336 336 merge_checks = self.request.GET.get('merge_checks')
337 337 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
338 338 force_refresh = str2bool(self.request.GET.get('force_refresh'))
339 339 c.range_diff_on = self.request.GET.get('range-diff') == "1"
340 340
341 341 # fetch global flags for ignoring whitespace and for context lines
342 342 diff_context = diffs.get_diff_context(self.request)
343 343 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
344 344
345 345 (pull_request_latest,
346 346 pull_request_at_ver,
347 347 pull_request_display_obj,
348 348 at_version) = PullRequestModel().get_pr_version(
349 349 pull_request_id, version=version)
350 350
351 351 pr_closed = pull_request_latest.is_closed()
352 352
353 353 if pr_closed and (version or from_version):
354 354 # do not allow browsing versions of a closed PR
355 355 raise HTTPFound(h.route_path(
356 356 'pullrequest_show', repo_name=self.db_repo_name,
357 357 pull_request_id=pull_request_id))
358 358
359 359 versions = pull_request_display_obj.versions()
360
361 c.commit_versions = PullRequestModel().pr_commits_versions(versions)
362
360 363 # used to store per-commit range diffs
361 364 c.changes = collections.OrderedDict()
362 365
363 366 c.at_version = at_version
364 367 c.at_version_num = (at_version
365 368 if at_version and at_version != PullRequest.LATEST_VER
366 369 else None)
367 370
368 371 c.at_version_index = ChangesetComment.get_index_from_version(
369 372 c.at_version_num, versions)
370 373
371 374 (prev_pull_request_latest,
372 375 prev_pull_request_at_ver,
373 376 prev_pull_request_display_obj,
374 377 prev_at_version) = PullRequestModel().get_pr_version(
375 378 pull_request_id, version=from_version)
376 379
377 380 c.from_version = prev_at_version
378 381 c.from_version_num = (prev_at_version
379 382 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
380 383 else None)
381 384 c.from_version_index = ChangesetComment.get_index_from_version(
382 385 c.from_version_num, versions)
383 386
384 387 # define if we're in COMPARE mode or VIEW at version mode
385 388 compare = at_version != prev_at_version
386 389
388 391 # the repo_name this pull request was opened against,
389 392 # i.e. the target_repo must match the current repo
389 392 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
390 393 log.warning('Mismatch between the current repo: %s, and target %s',
391 394 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
392 395 raise HTTPNotFound()
393 396
394 397 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
395 398
396 399 c.pull_request = pull_request_display_obj
397 400 c.renderer = pull_request_at_ver.description_renderer or c.renderer
398 401 c.pull_request_latest = pull_request_latest
399 402
400 403 # inject latest version
401 404 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
402 405 c.versions = versions + [latest_ver]
403 406
404 407 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
405 408 c.allowed_to_change_status = False
406 409 c.allowed_to_update = False
407 410 c.allowed_to_merge = False
408 411 c.allowed_to_delete = False
409 412 c.allowed_to_comment = False
410 413 c.allowed_to_close = False
411 414 else:
412 415 can_change_status = PullRequestModel().check_user_change_status(
413 416 pull_request_at_ver, self._rhodecode_user)
414 417 c.allowed_to_change_status = can_change_status and not pr_closed
415 418
416 419 c.allowed_to_update = PullRequestModel().check_user_update(
417 420 pull_request_latest, self._rhodecode_user) and not pr_closed
418 421 c.allowed_to_merge = PullRequestModel().check_user_merge(
419 422 pull_request_latest, self._rhodecode_user) and not pr_closed
420 423 c.allowed_to_delete = PullRequestModel().check_user_delete(
421 424 pull_request_latest, self._rhodecode_user) and not pr_closed
422 425 c.allowed_to_comment = not pr_closed
423 426 c.allowed_to_close = c.allowed_to_merge and not pr_closed
424 427
425 428 c.forbid_adding_reviewers = False
426 429
427 430 if pull_request_latest.reviewer_data and \
428 431 'rules' in pull_request_latest.reviewer_data:
429 432 rules = pull_request_latest.reviewer_data['rules'] or {}
430 433 try:
431 434 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
432 435 except Exception:
433 436 pass
434 437
435 438 # check merge capabilities
436 439 _merge_check = MergeCheck.validate(
437 440 pull_request_latest, auth_user=self._rhodecode_user,
438 441 translator=self.request.translate,
439 442 force_shadow_repo_refresh=force_refresh)
440 443
441 444 c.pr_merge_errors = _merge_check.error_details
442 445 c.pr_merge_possible = not _merge_check.failed
443 446 c.pr_merge_message = _merge_check.merge_msg
444 447 c.pr_merge_source_commit = _merge_check.source_commit
445 448 c.pr_merge_target_commit = _merge_check.target_commit
446 449
447 450 c.pr_merge_info = MergeCheck.get_merge_conditions(
448 451 pull_request_latest, translator=self.request.translate)
449 452
450 453 c.pull_request_review_status = _merge_check.review_status
451 454 if merge_checks:
452 455 self.request.override_renderer = \
453 456 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
454 457 return self._get_template_context(c)
455 458
456 459 c.reviewers_count = pull_request.reviewers_count
457 460 c.observers_count = pull_request.observers_count
458 461
459 462 # reviewers and statuses
460 463 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
461 464 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
462 465 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
463 466
464 467 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
465 468 member_reviewer = h.reviewer_as_json(
466 469 member, reasons=reasons, mandatory=mandatory,
467 470 role=review_obj.role,
468 471 user_group=review_obj.rule_user_group_data()
469 472 )
470 473
471 474 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
472 475 member_reviewer['review_status'] = current_review_status
473 476 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
474 477 member_reviewer['allowed_to_update'] = c.allowed_to_update
475 478 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
476 479
477 480 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
478 481
479 482 for observer_obj, member in pull_request_at_ver.observers():
480 483 member_observer = h.reviewer_as_json(
481 484 member, reasons=[], mandatory=False,
482 485 role=observer_obj.role,
483 486 user_group=observer_obj.rule_user_group_data()
484 487 )
485 488 member_observer['allowed_to_update'] = c.allowed_to_update
486 489 c.pull_request_set_observers_data_json['observers'].append(member_observer)
487 490
488 491 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
489 492
490 493 general_comments, inline_comments = \
491 494 self.register_comments_vars(c, pull_request_latest, versions)
492 495
493 496 # TODOs
494 497 c.unresolved_comments = CommentsModel() \
495 498 .get_pull_request_unresolved_todos(pull_request_latest)
496 499 c.resolved_comments = CommentsModel() \
497 500 .get_pull_request_resolved_todos(pull_request_latest)
498 501
499 502 # Drafts
500 503 c.draft_comments = CommentsModel().get_pull_request_drafts(
501 504 self._rhodecode_db_user.user_id,
502 505 pull_request_latest)
503 506
504 507 # if we use a version, do not show comments later
505 508 # than the current version
506 509 display_inline_comments = collections.defaultdict(
507 510 lambda: collections.defaultdict(list))
508 511 for co in inline_comments:
509 512 if c.at_version_num:
510 513 # pick comments created up to the given version, so we
511 514 # don't render comments for a higher version
512 515 should_render = co.pull_request_version_id and \
513 516 co.pull_request_version_id <= c.at_version_num
514 517 else:
515 518 # showing all, for 'latest'
516 519 should_render = True
517 520
518 521 if should_render:
519 522 display_inline_comments[co.f_path][co.line_no].append(co)
520 523
521 524 # load diff data into template context, if we use compare mode then
522 525 # diff is calculated based on changes between versions of PR
523 526
524 527 source_repo = pull_request_at_ver.source_repo
525 528 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
526 529
527 530 target_repo = pull_request_at_ver.target_repo
528 531 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
529 532
530 533 if compare:
531 534 # in compare switch the diff base to latest commit from prev version
532 535 target_ref_id = prev_pull_request_display_obj.revisions[0]
533 536
534 537 # despite opening commits for bookmarks/branches/tags, we always
535 538 # convert this to rev to prevent changes after bookmark or branch change
536 539 c.source_ref_type = 'rev'
537 540 c.source_ref = source_ref_id
538 541
539 542 c.target_ref_type = 'rev'
540 543 c.target_ref = target_ref_id
541 544
542 545 c.source_repo = source_repo
543 546 c.target_repo = target_repo
544 547
545 548 c.commit_ranges = []
546 549 source_commit = EmptyCommit()
547 550 target_commit = EmptyCommit()
548 551 c.missing_requirements = False
549 552
550 553 source_scm = source_repo.scm_instance()
551 554 target_scm = target_repo.scm_instance()
552 555
553 556 shadow_scm = None
554 557 try:
555 558 shadow_scm = pull_request_latest.get_shadow_repo()
556 559 except Exception:
557 560 log.debug('Failed to get shadow repo', exc_info=True)
558 561 # try first the existing source_repo, and then shadow
559 562 # repo if we can obtain one
560 563 commits_source_repo = source_scm
561 564 if shadow_scm:
562 565 commits_source_repo = shadow_scm
563 566
564 567 c.commits_source_repo = commits_source_repo
565 568 c.ancestor = None # set it to None, to hide it from PR view
566 569
567 570 # empty version means latest, so we keep this to prevent
568 571 # double caching
569 572 version_normalized = version or PullRequest.LATEST_VER
570 573 from_version_normalized = from_version or PullRequest.LATEST_VER
571 574
572 575 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
573 576 cache_file_path = diff_cache_exist(
574 577 cache_path, 'pull_request', pull_request_id, version_normalized,
575 578 from_version_normalized, source_ref_id, target_ref_id,
576 579 hide_whitespace_changes, diff_context, c.fulldiff)
577 580
578 581 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
579 582 force_recache = self.get_recache_flag()
580 583
581 584 cached_diff = None
582 585 if caching_enabled:
583 586 cached_diff = load_cached_diff(cache_file_path)
584 587
585 588 has_proper_commit_cache = (
586 589 cached_diff and cached_diff.get('commits')
587 590 and len(cached_diff.get('commits', [])) == 5
588 591 and cached_diff.get('commits')[0]
589 592 and cached_diff.get('commits')[3])
590 593
591 594 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
592 595 diff_commit_cache = \
593 596 (ancestor_commit, commit_cache, missing_requirements,
594 597 source_commit, target_commit) = cached_diff['commits']
595 598 else:
596 599 # NOTE(marcink): we may need to reach potentially unreachable commits when a PR has
597 600 # merge errors resulting in hidden commits in the shadow repo.
598 601 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
599 602 and _merge_check.merge_response
600 603 maybe_unreachable = maybe_unreachable \
601 604 and _merge_check.merge_response.metadata.get('unresolved_files')
602 605 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
603 606 diff_commit_cache = \
604 607 (ancestor_commit, commit_cache, missing_requirements,
605 608 source_commit, target_commit) = self.get_commits(
606 609 commits_source_repo,
607 610 pull_request_at_ver,
608 611 source_commit,
609 612 source_ref_id,
610 613 source_scm,
611 614 target_commit,
612 615 target_ref_id,
613 616 target_scm,
614 617 maybe_unreachable=maybe_unreachable)
615 618
616 619 # register our commit range
617 620 for comm in commit_cache.values():
618 621 c.commit_ranges.append(comm)
619 622
620 623 c.missing_requirements = missing_requirements
621 624 c.ancestor_commit = ancestor_commit
622 625 c.statuses = source_repo.statuses(
623 626 [x.raw_id for x in c.commit_ranges])
624 627
625 628 # auto collapse if we have more commits than the limit
626 629 collapse_limit = diffs.DiffProcessor._collapse_commits_over
627 630 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
628 631 c.compare_mode = compare
629 632
630 633 # diff_limit is the old behavior: it cuts off the whole diff
631 634 # if the limit is reached, otherwise it just hides the
632 635 # big files from the front-end
633 636 diff_limit = c.visual.cut_off_limit_diff
634 637 file_limit = c.visual.cut_off_limit_file
635 638
636 639 c.missing_commits = False
637 640 if (c.missing_requirements
638 641 or isinstance(source_commit, EmptyCommit)
639 642 or source_commit == target_commit):
640 643
641 644 c.missing_commits = True
642 645 else:
643 646 c.inline_comments = display_inline_comments
644 647
645 648 use_ancestor = True
646 649 if from_version_normalized != version_normalized:
647 650 use_ancestor = False
648 651
649 652 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
650 653 if not force_recache and has_proper_diff_cache:
651 654 c.diffset = cached_diff['diff']
652 655 else:
653 656 try:
654 657 c.diffset = self._get_diffset(
655 658 c.source_repo.repo_name, commits_source_repo,
656 659 c.ancestor_commit,
657 660 source_ref_id, target_ref_id,
658 661 target_commit, source_commit,
659 662 diff_limit, file_limit, c.fulldiff,
660 663 hide_whitespace_changes, diff_context,
661 664 use_ancestor=use_ancestor
662 665 )
663 666
664 667 # save cached diff
665 668 if caching_enabled:
666 669 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
667 670 except CommitDoesNotExistError:
668 671 log.exception('Failed to generate diffset')
669 672 c.missing_commits = True
670 673
671 674 if not c.missing_commits:
672 675
673 676 c.limited_diff = c.diffset.limited_diff
674 677
675 678 # calculate removed files that are bound to comments
676 679 comment_deleted_files = [
677 680 fname for fname in display_inline_comments
678 681 if fname not in c.diffset.file_stats]
679 682
680 683 c.deleted_files_comments = collections.defaultdict(dict)
681 684 for fname, per_line_comments in display_inline_comments.items():
682 685 if fname in comment_deleted_files:
683 686 c.deleted_files_comments[fname]['stats'] = 0
684 687 c.deleted_files_comments[fname]['comments'] = list()
685 688 for lno, comments in per_line_comments.items():
686 689 c.deleted_files_comments[fname]['comments'].extend(comments)
687 690
688 691 # maybe calculate the range diff
689 692 if c.range_diff_on:
690 693 # TODO(marcink): set whitespace/context
691 694 context_lcl = 3
692 695 ign_whitespace_lcl = False
693 696
694 697 for commit in c.commit_ranges:
695 698 commit2 = commit
696 699 commit1 = commit.first_parent
697 700
698 701 range_diff_cache_file_path = diff_cache_exist(
699 702 cache_path, 'diff', commit.raw_id,
700 703 ign_whitespace_lcl, context_lcl, c.fulldiff)
701 704
702 705 cached_diff = None
703 706 if caching_enabled:
704 707 cached_diff = load_cached_diff(range_diff_cache_file_path)
705 708
706 709 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
707 710 if not force_recache and has_proper_diff_cache:
708 711 diffset = cached_diff['diff']
709 712 else:
710 713 diffset = self._get_range_diffset(
711 714 commits_source_repo, source_repo,
712 715 commit1, commit2, diff_limit, file_limit,
713 716 c.fulldiff, ign_whitespace_lcl, context_lcl
714 717 )
715 718
716 719 # save cached diff
717 720 if caching_enabled:
718 721 cache_diff(range_diff_cache_file_path, diffset, None)
719 722
720 723 c.changes[commit.raw_id] = diffset
721 724
722 725 # this is a hack to properly display links; when creating a PR, the
723 726 # compare view and others use a different notation, and
724 727 # compare_commits.mako renders links based on the target_repo.
725 728 # We need to swap that here to generate the links properly on the HTML side
726 729 c.target_repo = c.source_repo
727 730
728 731 c.commit_statuses = ChangesetStatus.STATUSES
729 732
730 733 c.show_version_changes = not pr_closed
731 734 if c.show_version_changes:
732 735 cur_obj = pull_request_at_ver
733 736 prev_obj = prev_pull_request_at_ver
734 737
735 738 old_commit_ids = prev_obj.revisions
736 739 new_commit_ids = cur_obj.revisions
737 740 commit_changes = PullRequestModel()._calculate_commit_id_changes(
738 741 old_commit_ids, new_commit_ids)
739 742 c.commit_changes_summary = commit_changes
740 743
741 744 # calculate the diff for commits between versions
742 745 c.commit_changes = []
743 746
744 747 def mark(cs, fw):
745 748 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
746 749
747 750 for c_type, raw_id in mark(commit_changes.added, 'a') \
748 751 + mark(commit_changes.removed, 'r') \
749 752 + mark(commit_changes.common, 'c'):
750 753
751 754 if raw_id in commit_cache:
752 755 commit = commit_cache[raw_id]
753 756 else:
754 757 try:
755 758 commit = commits_source_repo.get_commit(raw_id)
756 759 except CommitDoesNotExistError:
757 760 # in case extraction fails, still use a "dummy" commit
758 761 # for display in the commit diff
759 762 commit = h.AttributeDict(
760 763 {'raw_id': raw_id,
761 764 'message': 'EMPTY or MISSING COMMIT'})
762 765 c.commit_changes.append([c_type, commit])
763 766
764 767 # current user review statuses for each version
765 768 c.review_versions = {}
766 769 is_reviewer = PullRequestModel().is_user_reviewer(
767 770 pull_request, self._rhodecode_user)
768 771 if is_reviewer:
769 772 for co in general_comments:
770 773 if co.author.user_id == self._rhodecode_user.user_id:
771 774 status = co.status_change
772 775 if status:
773 776 _ver_pr = status[0].comment.pull_request_version_id
774 777 c.review_versions[_ver_pr] = status[0]
775 778
776 779 return self._get_template_context(c)
777 780
778 781 def get_commits(
779 782 self, commits_source_repo, pull_request_at_ver, source_commit,
780 783 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
781 784 maybe_unreachable=False):
782 785
783 786 commit_cache = collections.OrderedDict()
784 787 missing_requirements = False
785 788
786 789 try:
787 790 pre_load = ["author", "date", "message", "branch", "parents"]
788 791
789 792 pull_request_commits = pull_request_at_ver.revisions
790 793 log.debug('Loading %s commits from %s',
791 794 len(pull_request_commits), commits_source_repo)
792 795
793 796 for rev in pull_request_commits:
794 797 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
795 798 maybe_unreachable=maybe_unreachable)
796 799 commit_cache[comm.raw_id] = comm
797 800
798 801 # Order here matters, we first need to get target, and then
799 802 # the source
800 803 target_commit = commits_source_repo.get_commit(
801 804 commit_id=safe_str(target_ref_id))
802 805
803 806 source_commit = commits_source_repo.get_commit(
804 807 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
805 808 except CommitDoesNotExistError:
806 809 log.warning('Failed to get commit from `{}` repo'.format(
807 810 commits_source_repo), exc_info=True)
808 811 except RepositoryRequirementError:
809 812 log.warning('Failed to get all required data from repo', exc_info=True)
810 813 missing_requirements = True
811 814
812 815 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
813 816
814 817 try:
815 818 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
816 819 except Exception:
817 820 ancestor_commit = None
818 821
819 822 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
820 823
821 824 def assure_not_empty_repo(self):
822 825 _ = self.request.translate
823 826
824 827 try:
825 828 self.db_repo.scm_instance().get_commit()
826 829 except EmptyRepositoryError:
827 830 h.flash(h.literal(_('There are no commits yet')),
828 831 category='warning')
829 832 raise HTTPFound(
830 833 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
831 834
832 835 @LoginRequired()
833 836 @NotAnonymous()
834 837 @HasRepoPermissionAnyDecorator(
835 838 'repository.read', 'repository.write', 'repository.admin')
836 839 def pull_request_new(self):
837 840 _ = self.request.translate
838 841 c = self.load_default_context()
839 842
840 843 self.assure_not_empty_repo()
841 844 source_repo = self.db_repo
842 845
843 846 commit_id = self.request.GET.get('commit')
844 847 branch_ref = self.request.GET.get('branch')
845 848 bookmark_ref = self.request.GET.get('bookmark')
846 849
847 850 try:
848 851 source_repo_data = PullRequestModel().generate_repo_data(
849 852 source_repo, commit_id=commit_id,
850 853 branch=branch_ref, bookmark=bookmark_ref,
851 854 translator=self.request.translate)
852 855 except CommitDoesNotExistError as e:
853 856 log.exception(e)
854 857 h.flash(_('Commit does not exist'), 'error')
855 858 raise HTTPFound(
856 859 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
857 860
858 861 default_target_repo = source_repo
859 862
860 863 if source_repo.parent and c.has_origin_repo_read_perm:
861 864 parent_vcs_obj = source_repo.parent.scm_instance()
862 865 if parent_vcs_obj and not parent_vcs_obj.is_empty():
863 866 # change default if we have a parent repo
864 867 default_target_repo = source_repo.parent
865 868
866 869 target_repo_data = PullRequestModel().generate_repo_data(
867 870 default_target_repo, translator=self.request.translate)
868 871
869 872 selected_source_ref = source_repo_data['refs']['selected_ref']
870 873 title_source_ref = ''
871 874 if selected_source_ref:
872 875 title_source_ref = selected_source_ref.split(':', 2)[1]
873 876 c.default_title = PullRequestModel().generate_pullrequest_title(
874 877 source=source_repo.repo_name,
875 878 source_ref=title_source_ref,
876 879 target=default_target_repo.repo_name
877 880 )
878 881
879 882 c.default_repo_data = {
880 883 'source_repo_name': source_repo.repo_name,
881 884 'source_refs_json': json.dumps(source_repo_data),
882 885 'target_repo_name': default_target_repo.repo_name,
883 886 'target_refs_json': json.dumps(target_repo_data),
884 887 }
885 888 c.default_source_ref = selected_source_ref
886 889
887 890 return self._get_template_context(c)
888 891
889 892 @LoginRequired()
890 893 @NotAnonymous()
891 894 @HasRepoPermissionAnyDecorator(
892 895 'repository.read', 'repository.write', 'repository.admin')
893 896 def pull_request_repo_refs(self):
894 897 self.load_default_context()
895 898 target_repo_name = self.request.matchdict['target_repo_name']
896 899 repo = Repository.get_by_repo_name(target_repo_name)
897 900 if not repo:
898 901 raise HTTPNotFound()
899 902
900 903 target_perm = HasRepoPermissionAny(
901 904 'repository.read', 'repository.write', 'repository.admin')(
902 905 target_repo_name)
903 906 if not target_perm:
904 907 raise HTTPNotFound()
905 908
906 909 return PullRequestModel().generate_repo_data(
907 910 repo, translator=self.request.translate)
908 911
909 912 @LoginRequired()
910 913 @NotAnonymous()
911 914 @HasRepoPermissionAnyDecorator(
912 915 'repository.read', 'repository.write', 'repository.admin')
913 916 def pullrequest_repo_targets(self):
914 917 _ = self.request.translate
915 918 filter_query = self.request.GET.get('query')
916 919
917 920 # get the parents
918 921 parent_target_repos = []
919 922 if self.db_repo.parent:
920 923 parents_query = Repository.query() \
921 924 .order_by(func.length(Repository.repo_name)) \
922 925 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
923 926
924 927 if filter_query:
925 928 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
926 929 parents_query = parents_query.filter(
927 930 Repository.repo_name.ilike(ilike_expression))
928 931 parents = parents_query.limit(20).all()
929 932
930 933 for parent in parents:
931 934 parent_vcs_obj = parent.scm_instance()
932 935 if parent_vcs_obj and not parent_vcs_obj.is_empty():
933 936 parent_target_repos.append(parent)
934 937
935 938 # get other forks, and repo itself
936 939 query = Repository.query() \
937 940 .order_by(func.length(Repository.repo_name)) \
938 941 .filter(
939 942 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
940 943 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
941 944 ) \
942 945 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
943 946
944 947 if filter_query:
945 948 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
946 949 query = query.filter(Repository.repo_name.ilike(ilike_expression))
947 950
948 951 limit = max(20 - len(parent_target_repos), 5) # not less than 5
949 952 target_repos = query.limit(limit).all()
950 953
951 954 all_target_repos = target_repos + parent_target_repos
952 955
953 956 repos = []
954 957 # This checks permissions to the repositories
955 958 for obj in ScmModel().get_repos(all_target_repos):
956 959 repos.append({
957 960 'id': obj['name'],
958 961 'text': obj['name'],
959 962 'type': 'repo',
960 963 'repo_id': obj['dbrepo']['repo_id'],
961 964 'repo_type': obj['dbrepo']['repo_type'],
962 965 'private': obj['dbrepo']['private'],
963 966
964 967 })
965 968
966 969 data = {
967 970 'more': False,
968 971 'results': [{
969 972 'text': _('Repositories'),
970 973 'children': repos
971 974 }] if repos else []
972 975 }
973 976 return data
974 977
975 978 @classmethod
976 979 def get_comment_ids(cls, post_data):
977 980 return filter(lambda e: e > 0, map(safe_int, aslist(post_data.get('comments'), ',')))
978 981
979 982 @LoginRequired()
980 983 @NotAnonymous()
981 984 @HasRepoPermissionAnyDecorator(
982 985 'repository.read', 'repository.write', 'repository.admin')
983 986 def pullrequest_comments(self):
984 987 self.load_default_context()
985 988
986 989 pull_request = PullRequest.get_or_404(
987 990 self.request.matchdict['pull_request_id'])
988 991 pull_request_id = pull_request.pull_request_id
989 992 version = self.request.GET.get('version')
990 993
991 994 _render = self.request.get_partial_renderer(
992 995 'rhodecode:templates/base/sidebar.mako')
993 996 c = _render.get_call_context()
994 997
995 998 (pull_request_latest,
996 999 pull_request_at_ver,
997 1000 pull_request_display_obj,
998 1001 at_version) = PullRequestModel().get_pr_version(
999 1002 pull_request_id, version=version)
1000 1003 versions = pull_request_display_obj.versions()
1001 1004 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1002 1005 c.versions = versions + [latest_ver]
1003 1006
1004 1007 c.at_version = at_version
1005 1008 c.at_version_num = (at_version
1006 1009 if at_version and at_version != PullRequest.LATEST_VER
1007 1010 else None)
1008 1011
1009 1012 self.register_comments_vars(c, pull_request_latest, versions, include_drafts=False)
1010 1013 all_comments = c.inline_comments_flat + c.comments
1011 1014
1012 1015 existing_ids = self.get_comment_ids(self.request.POST)
1013 1016 return _render('comments_table', all_comments, len(all_comments),
1014 1017 existing_ids=existing_ids)
1015 1018
1016 1019 @LoginRequired()
1017 1020 @NotAnonymous()
1018 1021 @HasRepoPermissionAnyDecorator(
1019 1022 'repository.read', 'repository.write', 'repository.admin')
1020 1023 def pullrequest_todos(self):
1021 1024 self.load_default_context()
1022 1025
1023 1026 pull_request = PullRequest.get_or_404(
1024 1027 self.request.matchdict['pull_request_id'])
1025 1028 pull_request_id = pull_request.pull_request_id
1026 1029 version = self.request.GET.get('version')
1027 1030
1028 1031 _render = self.request.get_partial_renderer(
1029 1032 'rhodecode:templates/base/sidebar.mako')
1030 1033 c = _render.get_call_context()
1031 1034 (pull_request_latest,
1032 1035 pull_request_at_ver,
1033 1036 pull_request_display_obj,
1034 1037 at_version) = PullRequestModel().get_pr_version(
1035 1038 pull_request_id, version=version)
1036 1039 versions = pull_request_display_obj.versions()
1037 1040 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1038 1041 c.versions = versions + [latest_ver]
1039 1042
1040 1043 c.at_version = at_version
1041 1044 c.at_version_num = (at_version
1042 1045 if at_version and at_version != PullRequest.LATEST_VER
1043 1046 else None)
1044 1047
1045 1048 c.unresolved_comments = CommentsModel() \
1046 1049 .get_pull_request_unresolved_todos(pull_request, include_drafts=False)
1047 1050 c.resolved_comments = CommentsModel() \
1048 1051 .get_pull_request_resolved_todos(pull_request, include_drafts=False)
1049 1052
1050 1053 all_comments = c.unresolved_comments + c.resolved_comments
1051 1054 existing_ids = self.get_comment_ids(self.request.POST)
1052 1055 return _render('comments_table', all_comments, len(c.unresolved_comments),
1053 1056 todo_comments=True, existing_ids=existing_ids)
1054 1057
1055 1058 @LoginRequired()
1056 1059 @NotAnonymous()
1057 1060 @HasRepoPermissionAnyDecorator(
1058 1061 'repository.read', 'repository.write', 'repository.admin')
1059 1062 def pullrequest_drafts(self):
1060 1063 self.load_default_context()
1061 1064
1062 1065 pull_request = PullRequest.get_or_404(
1063 1066 self.request.matchdict['pull_request_id'])
1064 1067 pull_request_id = pull_request.pull_request_id
1065 1068 version = self.request.GET.get('version')
1066 1069
1067 1070 _render = self.request.get_partial_renderer(
1068 1071 'rhodecode:templates/base/sidebar.mako')
1069 1072 c = _render.get_call_context()
1070 1073
1071 1074 (pull_request_latest,
1072 1075 pull_request_at_ver,
1073 1076 pull_request_display_obj,
1074 1077 at_version) = PullRequestModel().get_pr_version(
1075 1078 pull_request_id, version=version)
1076 1079 versions = pull_request_display_obj.versions()
1077 1080 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1078 1081 c.versions = versions + [latest_ver]
1079 1082
1080 1083 c.at_version = at_version
1081 1084 c.at_version_num = (at_version
1082 1085 if at_version and at_version != PullRequest.LATEST_VER
1083 1086 else None)
1084 1087
1085 1088 c.draft_comments = CommentsModel() \
1086 1089 .get_pull_request_drafts(self._rhodecode_db_user.user_id, pull_request)
1087 1090
1088 1091 all_comments = c.draft_comments
1089 1092
1090 1093 existing_ids = self.get_comment_ids(self.request.POST)
1091 1094 return _render('comments_table', all_comments, len(all_comments),
1092 1095 existing_ids=existing_ids, draft_comments=True)
1093 1096
1094 1097 @LoginRequired()
1095 1098 @NotAnonymous()
1096 1099 @HasRepoPermissionAnyDecorator(
1097 1100 'repository.read', 'repository.write', 'repository.admin')
1098 1101 @CSRFRequired()
1099 1102 def pull_request_create(self):
1100 1103 _ = self.request.translate
1101 1104 self.assure_not_empty_repo()
1102 1105 self.load_default_context()
1103 1106
1104 1107 controls = peppercorn.parse(self.request.POST.items())
1105 1108
1106 1109 try:
1107 1110 form = PullRequestForm(
1108 1111 self.request.translate, self.db_repo.repo_id)()
1109 1112 _form = form.to_python(controls)
1110 1113 except formencode.Invalid as errors:
1111 1114 if errors.error_dict.get('revisions'):
1112 1115 msg = 'Revisions: %s' % errors.error_dict['revisions']
1113 1116 elif errors.error_dict.get('pullrequest_title'):
1114 1117 msg = errors.error_dict.get('pullrequest_title')
1115 1118 else:
1116 1119 msg = _('Error creating pull request: {}').format(errors)
1117 1120 log.exception(msg)
1118 1121 h.flash(msg, 'error')
1119 1122
1120 1123 # would rather just go back to form ...
1121 1124 raise HTTPFound(
1122 1125 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1123 1126
1124 1127 source_repo = _form['source_repo']
1125 1128 source_ref = _form['source_ref']
1126 1129 target_repo = _form['target_repo']
1127 1130 target_ref = _form['target_ref']
1128 1131 commit_ids = _form['revisions'][::-1]
1129 1132 common_ancestor_id = _form['common_ancestor']
1130 1133
1131 1134 # find the ancestor for this pr
1132 1135 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1133 1136 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1134 1137
1135 1138 if not (source_db_repo or target_db_repo):
1136 1139 h.flash(_('source_repo or target repo not found'), category='error')
1137 1140 raise HTTPFound(
1138 1141 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1139 1142
1140 1143 # re-check permissions here;
1141 1144 # for the source_repo we must have read permissions
1142 1145
1143 1146 source_perm = HasRepoPermissionAny(
1144 1147 'repository.read', 'repository.write', 'repository.admin')(
1145 1148 source_db_repo.repo_name)
1146 1149 if not source_perm:
1147 1150 msg = _('Not Enough permissions to source repo `{}`.'.format(
1148 1151 source_db_repo.repo_name))
1149 1152 h.flash(msg, category='error')
1150 1153 # copy the args back to redirect
1151 1154 org_query = self.request.GET.mixed()
1152 1155 raise HTTPFound(
1153 1156 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1154 1157 _query=org_query))
1155 1158
1156 1159 # for the target repo we must have read permissions, and later on
1157 1160 # we also want to check branch permissions here
1158 1161 target_perm = HasRepoPermissionAny(
1159 1162 'repository.read', 'repository.write', 'repository.admin')(
1160 1163 target_db_repo.repo_name)
1161 1164 if not target_perm:
1162 1165 msg = _('Not Enough permissions to target repo `{}`.'.format(
1163 1166 target_db_repo.repo_name))
1164 1167 h.flash(msg, category='error')
1165 1168 # copy the args back to redirect
1166 1169 org_query = self.request.GET.mixed()
1167 1170 raise HTTPFound(
1168 1171 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1169 1172 _query=org_query))
1170 1173
1171 1174 source_scm = source_db_repo.scm_instance()
1172 1175 target_scm = target_db_repo.scm_instance()
1173 1176
1174 1177 source_ref_obj = unicode_to_reference(source_ref)
1175 1178 target_ref_obj = unicode_to_reference(target_ref)
1176 1179
1177 1180 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1178 1181 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1179 1182
1180 1183 ancestor = source_scm.get_common_ancestor(
1181 1184 source_commit.raw_id, target_commit.raw_id, target_scm)
1182 1185
1183 1186 # recalculate target ref based on ancestor
1184 1187 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1185 1188
1186 1189 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1187 1190 PullRequestModel().get_reviewer_functions()
1188 1191
1189 1192 # recalculate reviewers logic, to make sure we can validate this
1190 1193 reviewer_rules = get_default_reviewers_data(
1191 1194 self._rhodecode_db_user,
1192 1195 source_db_repo,
1193 1196 source_ref_obj,
1194 1197 target_db_repo,
1195 1198 target_ref_obj,
1196 1199 include_diff_info=False)
1197 1200
1198 1201 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1199 1202 observers = validate_observers(_form['observer_members'], reviewer_rules)
1200 1203
1201 1204 pullrequest_title = _form['pullrequest_title']
1202 1205 title_source_ref = source_ref_obj.name
1203 1206 if not pullrequest_title:
1204 1207 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1205 1208 source=source_repo,
1206 1209 source_ref=title_source_ref,
1207 1210 target=target_repo
1208 1211 )
1209 1212
1210 1213 description = _form['pullrequest_desc']
1211 1214 description_renderer = _form['description_renderer']
1212 1215
1213 1216 try:
1214 1217 pull_request = PullRequestModel().create(
1215 1218 created_by=self._rhodecode_user.user_id,
1216 1219 source_repo=source_repo,
1217 1220 source_ref=source_ref,
1218 1221 target_repo=target_repo,
1219 1222 target_ref=target_ref,
1220 1223 revisions=commit_ids,
1221 1224 common_ancestor_id=common_ancestor_id,
1222 1225 reviewers=reviewers,
1223 1226 observers=observers,
1224 1227 title=pullrequest_title,
1225 1228 description=description,
1226 1229 description_renderer=description_renderer,
1227 1230 reviewer_data=reviewer_rules,
1228 1231 auth_user=self._rhodecode_user
1229 1232 )
1230 1233 Session().commit()
1231 1234
1232 1235 h.flash(_('Successfully opened new pull request'),
1233 1236 category='success')
1234 1237 except Exception:
1235 1238 msg = _('Error occurred during creation of this pull request.')
1236 1239 log.exception(msg)
1237 1240 h.flash(msg, category='error')
1238 1241
1239 1242 # copy the args back to redirect
1240 1243 org_query = self.request.GET.mixed()
1241 1244 raise HTTPFound(
1242 1245 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1243 1246 _query=org_query))
1244 1247
1245 1248 raise HTTPFound(
1246 1249 h.route_path('pullrequest_show', repo_name=target_repo,
1247 1250 pull_request_id=pull_request.pull_request_id))
1248 1251
1249 1252 @LoginRequired()
1250 1253 @NotAnonymous()
1251 1254 @HasRepoPermissionAnyDecorator(
1252 1255 'repository.read', 'repository.write', 'repository.admin')
1253 1256 @CSRFRequired()
1254 1257 def pull_request_update(self):
1255 1258 pull_request = PullRequest.get_or_404(
1256 1259 self.request.matchdict['pull_request_id'])
1257 1260 _ = self.request.translate
1258 1261
1259 1262 c = self.load_default_context()
1260 1263 redirect_url = None
1261 1264
1262 1265 if pull_request.is_closed():
1263 1266 log.debug('update: forbidden because pull request is closed')
1264 1267 msg = _(u'Cannot update closed pull requests.')
1265 1268 h.flash(msg, category='error')
1266 1269 return {'response': True,
1267 1270 'redirect_url': redirect_url}
1268 1271
1269 1272 is_state_changing = pull_request.is_state_changing()
1270 1273 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1271 1274
1272 1275 # only owner or admin can update it
1273 1276 allowed_to_update = PullRequestModel().check_user_update(
1274 1277 pull_request, self._rhodecode_user)
1275 1278
1276 1279 if allowed_to_update:
1277 1280 controls = peppercorn.parse(self.request.POST.items())
1278 1281 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1279 1282
1280 1283 if 'review_members' in controls:
1281 1284 self._update_reviewers(
1282 1285 c,
1283 1286 pull_request, controls['review_members'],
1284 1287 pull_request.reviewer_data,
1285 1288 PullRequestReviewers.ROLE_REVIEWER)
1286 1289 elif 'observer_members' in controls:
1287 1290 self._update_reviewers(
1288 1291 c,
1289 1292 pull_request, controls['observer_members'],
1290 1293 pull_request.reviewer_data,
1291 1294 PullRequestReviewers.ROLE_OBSERVER)
1292 1295 elif str2bool(self.request.POST.get('update_commits', 'false')):
1293 1296 if is_state_changing:
1294 1297 log.debug('commits update: forbidden because pull request is in state %s',
1295 1298 pull_request.pull_request_state)
1296 1299 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1297 1300 u'Current state is: `{}`').format(
1298 1301 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1299 1302 h.flash(msg, category='error')
1300 1303 return {'response': True,
1301 1304 'redirect_url': redirect_url}
1302 1305
1303 1306 self._update_commits(c, pull_request)
1304 1307 if force_refresh:
1305 1308 redirect_url = h.route_path(
1306 1309 'pullrequest_show', repo_name=self.db_repo_name,
1307 1310 pull_request_id=pull_request.pull_request_id,
1308 1311 _query={"force_refresh": 1})
1309 1312 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1310 1313 self._edit_pull_request(pull_request)
1311 1314 else:
1312 1315 log.error('Unhandled update data.')
1313 1316 raise HTTPBadRequest()
1314 1317
1315 1318 return {'response': True,
1316 1319 'redirect_url': redirect_url}
1317 1320 raise HTTPForbidden()
1318 1321
1319 1322 def _edit_pull_request(self, pull_request):
1320 1323 """
1321 1324 Edit title and description
1322 1325 """
1323 1326 _ = self.request.translate
1324 1327
1325 1328 try:
1326 1329 PullRequestModel().edit(
1327 1330 pull_request,
1328 1331 self.request.POST.get('title'),
1329 1332 self.request.POST.get('description'),
1330 1333 self.request.POST.get('description_renderer'),
1331 1334 self._rhodecode_user)
1332 1335 except ValueError:
1333 1336 msg = _(u'Cannot update closed pull requests.')
1334 1337 h.flash(msg, category='error')
1335 1338 return
1336 1339 else:
1337 1340 Session().commit()
1338 1341
1339 1342 msg = _(u'Pull request title & description updated.')
1340 1343 h.flash(msg, category='success')
1341 1344 return
1342 1345
1343 1346 def _update_commits(self, c, pull_request):
1344 1347 _ = self.request.translate
1345 1348
1346 1349 with pull_request.set_state(PullRequest.STATE_UPDATING):
1347 1350 resp = PullRequestModel().update_commits(
1348 1351 pull_request, self._rhodecode_db_user)
1349 1352
1350 1353 if resp.executed:
1351 1354
1352 1355 if resp.target_changed and resp.source_changed:
1353 1356 changed = 'target and source repositories'
1354 1357 elif resp.target_changed and not resp.source_changed:
1355 1358 changed = 'target repository'
1356 1359 elif not resp.target_changed and resp.source_changed:
1357 1360 changed = 'source repository'
1358 1361 else:
1359 1362 changed = 'nothing'
1360 1363
1361 1364 msg = _(u'Pull request updated to "{source_commit_id}" with '
1362 1365 u'{count_added} added, {count_removed} removed commits. '
1363 1366 u'Source of changes: {change_source}.')
1364 1367 msg = msg.format(
1365 1368 source_commit_id=pull_request.source_ref_parts.commit_id,
1366 1369 count_added=len(resp.changes.added),
1367 1370 count_removed=len(resp.changes.removed),
1368 1371 change_source=changed)
1369 1372 h.flash(msg, category='success')
1370 1373 channelstream.pr_update_channelstream_push(
1371 1374 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1372 1375 else:
1373 1376 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1374 1377 warning_reasons = [
1375 1378 UpdateFailureReason.NO_CHANGE,
1376 1379 UpdateFailureReason.WRONG_REF_TYPE,
1377 1380 ]
1378 1381 category = 'warning' if resp.reason in warning_reasons else 'error'
1379 1382 h.flash(msg, category=category)
1380 1383
1381 1384 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1382 1385 _ = self.request.translate
1383 1386
1384 1387 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1385 1388 PullRequestModel().get_reviewer_functions()
1386 1389
1387 1390 if role == PullRequestReviewers.ROLE_REVIEWER:
1388 1391 try:
1389 1392 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1390 1393 except ValueError as e:
1391 1394 log.error('Reviewers Validation: {}'.format(e))
1392 1395 h.flash(e, category='error')
1393 1396 return
1394 1397
1395 1398 old_calculated_status = pull_request.calculated_review_status()
1396 1399 PullRequestModel().update_reviewers(
1397 1400 pull_request, reviewers, self._rhodecode_db_user)
1398 1401
1399 1402 Session().commit()
1400 1403
1401 1404 msg = _('Pull request reviewers updated.')
1402 1405 h.flash(msg, category='success')
1403 1406 channelstream.pr_update_channelstream_push(
1404 1407 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1405 1408
1406 1409 # trigger a status change event if updating reviewers changes the calculated status
1407 1410 calculated_status = pull_request.calculated_review_status()
1408 1411 if old_calculated_status != calculated_status:
1409 1412 PullRequestModel().trigger_pull_request_hook(
1410 1413 pull_request, self._rhodecode_user, 'review_status_change',
1411 1414 data={'status': calculated_status})
1412 1415
1413 1416 elif role == PullRequestReviewers.ROLE_OBSERVER:
1414 1417 try:
1415 1418 observers = validate_observers(review_members, reviewer_rules)
1416 1419 except ValueError as e:
1417 1420 log.error('Observers Validation: {}'.format(e))
1418 1421 h.flash(e, category='error')
1419 1422 return
1420 1423
1421 1424 PullRequestModel().update_observers(
1422 1425 pull_request, observers, self._rhodecode_db_user)
1423 1426
1424 1427 Session().commit()
1425 1428 msg = _('Pull request observers updated.')
1426 1429 h.flash(msg, category='success')
1427 1430 channelstream.pr_update_channelstream_push(
1428 1431 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1429 1432
1430 1433 @LoginRequired()
1431 1434 @NotAnonymous()
1432 1435 @HasRepoPermissionAnyDecorator(
1433 1436 'repository.read', 'repository.write', 'repository.admin')
1434 1437 @CSRFRequired()
1435 1438 def pull_request_merge(self):
1436 1439 """
1437 1440 Merge will perform a server-side merge of the specified
1438 1441 pull request, if the pull request is approved and mergeable.
1439 1442 After successful merging, the pull request is automatically
1440 1443 closed, with a relevant comment.
1441 1444 """
1442 1445 pull_request = PullRequest.get_or_404(
1443 1446 self.request.matchdict['pull_request_id'])
1444 1447 _ = self.request.translate
1445 1448
1446 1449 if pull_request.is_state_changing():
1447 1450 log.debug('merge: forbidden because pull request is in state %s',
1448 1451 pull_request.pull_request_state)
1449 1452 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1450 1453 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1451 1454 pull_request.pull_request_state)
1452 1455 h.flash(msg, category='error')
1453 1456 raise HTTPFound(
1454 1457 h.route_path('pullrequest_show',
1455 1458 repo_name=pull_request.target_repo.repo_name,
1456 1459 pull_request_id=pull_request.pull_request_id))
1457 1460
1458 1461 self.load_default_context()
1459 1462
1460 1463 with pull_request.set_state(PullRequest.STATE_UPDATING):
1461 1464 check = MergeCheck.validate(
1462 1465 pull_request, auth_user=self._rhodecode_user,
1463 1466 translator=self.request.translate)
1464 1467 merge_possible = not check.failed
1465 1468
1466 1469 for err_type, error_msg in check.errors:
1467 1470 h.flash(error_msg, category=err_type)
1468 1471
1469 1472 if merge_possible:
1470 1473 log.debug("Pre-conditions checked, trying to merge.")
1471 1474 extras = vcs_operation_context(
1472 1475 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1473 1476 username=self._rhodecode_db_user.username, action='push',
1474 1477 scm=pull_request.target_repo.repo_type)
1475 1478 with pull_request.set_state(PullRequest.STATE_UPDATING):
1476 1479 self._merge_pull_request(
1477 1480 pull_request, self._rhodecode_db_user, extras)
1478 1481 else:
1479 1482 log.debug("Pre-conditions failed, NOT merging.")
1480 1483
1481 1484 raise HTTPFound(
1482 1485 h.route_path('pullrequest_show',
1483 1486 repo_name=pull_request.target_repo.repo_name,
1484 1487 pull_request_id=pull_request.pull_request_id))
1485 1488
1486 1489 def _merge_pull_request(self, pull_request, user, extras):
1487 1490 _ = self.request.translate
1488 1491 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1489 1492
1490 1493 if merge_resp.executed:
1491 1494 log.debug("The merge was successful, closing the pull request.")
1492 1495 PullRequestModel().close_pull_request(
1493 1496 pull_request.pull_request_id, user)
1494 1497 Session().commit()
1495 1498 msg = _('Pull request was successfully merged and closed.')
1496 1499 h.flash(msg, category='success')
1497 1500 else:
1498 1501 log.debug(
1499 1502 "The merge was not successful. Merge response: %s", merge_resp)
1500 1503 msg = merge_resp.merge_status_message
1501 1504 h.flash(msg, category='error')
1502 1505
1503 1506 @LoginRequired()
1504 1507 @NotAnonymous()
1505 1508 @HasRepoPermissionAnyDecorator(
1506 1509 'repository.read', 'repository.write', 'repository.admin')
1507 1510 @CSRFRequired()
1508 1511 def pull_request_delete(self):
1509 1512 _ = self.request.translate
1510 1513
1511 1514 pull_request = PullRequest.get_or_404(
1512 1515 self.request.matchdict['pull_request_id'])
1513 1516 self.load_default_context()
1514 1517
1515 1518 pr_closed = pull_request.is_closed()
1516 1519 allowed_to_delete = PullRequestModel().check_user_delete(
1517 1520 pull_request, self._rhodecode_user) and not pr_closed
1518 1521
1519 1522 # only users allowed to delete can remove it, and only while the pull request is open
1520 1523 if allowed_to_delete:
1521 1524 PullRequestModel().delete(pull_request, self._rhodecode_user)
1522 1525 Session().commit()
1523 1526 h.flash(_('Successfully deleted pull request'),
1524 1527 category='success')
1525 1528 raise HTTPFound(h.route_path('pullrequest_show_all',
1526 1529 repo_name=self.db_repo_name))
1527 1530
1528 1531 log.warning('user %s tried to delete pull request without access',
1529 1532 self._rhodecode_user)
1530 1533 raise HTTPNotFound()
1531 1534
1532 1535 def _pull_request_comments_create(self, pull_request, comments):
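# each entry in `comments` is expected to carry the keys read below; an
# illustrative entry (values are examples only) could look like:
#   {'comment_type': 'note', 'text': 'looks good', 'status': None,
#    'is_draft': 'false', 'resolves_comment_id': None,
#    'close_pull_request': None, 'f_path': None, 'line': None}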
1533 1536 _ = self.request.translate
1534 1537 data = {}
1535 1538 if not comments:
1536 1539 return
1537 1540 pull_request_id = pull_request.pull_request_id
1538 1541
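# True only when every submitted comment in this batch is a draft; used
# below to skip channelstream notifications for draft-only submissions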
1539 1542 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
1540 1543
1541 1544 for entry in comments:
1542 1545 c = self.load_default_context()
1543 1546 comment_type = entry['comment_type']
1544 1547 text = entry['text']
1545 1548 status = entry['status']
1546 1549 is_draft = str2bool(entry['is_draft'])
1547 1550 resolves_comment_id = entry['resolves_comment_id']
1548 1551 close_pull_request = entry['close_pull_request']
1549 1552 f_path = entry['f_path']
1550 1553 line_no = entry['line']
1551 1554 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
1552 1555
1553 1556 # the logic here works as follows: if we submit a close-pull-request
1554 1557 # comment, use the `close_pull_request_with_comment` function,
1555 1558 # otherwise handle the regular comment logic
1556 1559
1557 1560 if close_pull_request:
1558 1561 # only owner or admin or person with write permissions
1559 1562 allowed_to_close = PullRequestModel().check_user_update(
1560 1563 pull_request, self._rhodecode_user)
1561 1564 if not allowed_to_close:
1562 1565 log.debug('comment: forbidden because not allowed to close '
1563 1566 'pull request %s', pull_request_id)
1564 1567 raise HTTPForbidden()
1565 1568
1566 1569 # This also triggers `review_status_change`
1567 1570 comment, status = PullRequestModel().close_pull_request_with_comment(
1568 1571 pull_request, self._rhodecode_user, self.db_repo, message=text,
1569 1572 auth_user=self._rhodecode_user)
1570 1573 Session().flush()
1571 1574 is_inline = comment.is_inline
1572 1575
1573 1576 PullRequestModel().trigger_pull_request_hook(
1574 1577 pull_request, self._rhodecode_user, 'comment',
1575 1578 data={'comment': comment})
1576 1579
1577 1580 else:
1578 1581 # regular comment case, could be inline, or one with status.
1579 1582 # for that one we check also permissions
1580 1583 # additionally ENSURE that if a draft comment is somehow sent, it cannot change the status
1581 1584 allowed_to_change_status = PullRequestModel().check_user_change_status(
1582 1585 pull_request, self._rhodecode_user) and not is_draft
1583 1586
1584 1587 if status and allowed_to_change_status:
1585 1588 message = (_('Status change %(transition_icon)s %(status)s')
1586 1589 % {'transition_icon': '>',
1587 1590 'status': ChangesetStatus.get_status_lbl(status)})
1588 1591 text = text or message
1589 1592
1590 1593 comment = CommentsModel().create(
1591 1594 text=text,
1592 1595 repo=self.db_repo.repo_id,
1593 1596 user=self._rhodecode_user.user_id,
1594 1597 pull_request=pull_request,
1595 1598 f_path=f_path,
1596 1599 line_no=line_no,
1597 1600 status_change=(ChangesetStatus.get_status_lbl(status)
1598 1601 if status and allowed_to_change_status else None),
1599 1602 status_change_type=(status
1600 1603 if status and allowed_to_change_status else None),
1601 1604 comment_type=comment_type,
1602 1605 is_draft=is_draft,
1603 1606 resolves_comment_id=resolves_comment_id,
1604 1607 auth_user=self._rhodecode_user,
1605 1608 send_email=not is_draft, # skip notification for draft comments
1606 1609 )
1607 1610 is_inline = comment.is_inline
1608 1611
1609 1612 if allowed_to_change_status:
1610 1613 # calculate old status before we change it
1611 1614 old_calculated_status = pull_request.calculated_review_status()
1612 1615
1613 1616 # get status if set !
1614 1617 if status:
1615 1618 ChangesetStatusModel().set_status(
1616 1619 self.db_repo.repo_id,
1617 1620 status,
1618 1621 self._rhodecode_user.user_id,
1619 1622 comment,
1620 1623 pull_request=pull_request
1621 1624 )
1622 1625
1623 1626 Session().flush()
1624 1627 # a refresh is required here so that relationships loaded on the
1625 1628 # comment are accessible after the flush
1626 1629 Session().refresh(comment)
1627 1630
1628 1631 # skip notifications for drafts
1629 1632 if not is_draft:
1630 1633 PullRequestModel().trigger_pull_request_hook(
1631 1634 pull_request, self._rhodecode_user, 'comment',
1632 1635 data={'comment': comment})
1633 1636
1634 1637 # we now calculate the status of the pull request, and based on that
1635 1638 # calculation we set the commit statuses
1636 1639 calculated_status = pull_request.calculated_review_status()
1637 1640 if old_calculated_status != calculated_status:
1638 1641 PullRequestModel().trigger_pull_request_hook(
1639 1642 pull_request, self._rhodecode_user, 'review_status_change',
1640 1643 data={'status': calculated_status})
1641 1644
1642 1645 comment_id = comment.comment_id
1643 1646 data[comment_id] = {
1644 1647 'target_id': target_elem_id
1645 1648 }
1646 1649 Session().flush()
1647 1650
1648 1651 c.co = comment
1649 1652 c.at_version_num = None
1650 1653 c.is_new = True
1651 1654 rendered_comment = render(
1652 1655 'rhodecode:templates/changeset/changeset_comment_block.mako',
1653 1656 self._get_template_context(c), self.request)
1654 1657
1655 1658 data[comment_id].update(comment.get_dict())
1656 1659 data[comment_id].update({'rendered_text': rendered_comment})
1657 1660
1658 1661 Session().commit()
1659 1662
1660 1663 # skip channelstream for draft comments
1661 1664 if not all_drafts:
1662 1665 comment_broadcast_channel = channelstream.comment_channel(
1663 1666 self.db_repo_name, pull_request_obj=pull_request)
1664 1667
1665 1668 comment_data = data
1666 1669 posted_comment_type = 'inline' if is_inline else 'general'
1667 1670 if len(data) == 1:
1668 1671 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
1669 1672 else:
1670 1673 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
1671 1674
1672 1675 channelstream.comment_channelstream_push(
1673 1676 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
1674 1677 comment_data=comment_data)
1675 1678
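# the returned mapping is keyed by comment_id; each value holds the target
# element id, the serialized comment fields and the rendered HTML, roughly:
#   {<comment_id>: {'target_id': 'file-...', ..., 'rendered_text': '<rendered block>'}}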
1676 1679 return data
1677 1680
1678 1681 @LoginRequired()
1679 1682 @NotAnonymous()
1680 1683 @HasRepoPermissionAnyDecorator(
1681 1684 'repository.read', 'repository.write', 'repository.admin')
1682 1685 @CSRFRequired()
1683 1686 def pull_request_comment_create(self):
1684 1687 _ = self.request.translate
1685 1688
1686 1689 pull_request = PullRequest.get_or_404(self.request.matchdict['pull_request_id'])
1687 1690
1688 1691 if pull_request.is_closed():
1689 1692 log.debug('comment: forbidden because pull request is closed')
1690 1693 raise HTTPForbidden()
1691 1694
1692 1695 allowed_to_comment = PullRequestModel().check_user_comment(
1693 1696 pull_request, self._rhodecode_user)
1694 1697 if not allowed_to_comment:
1695 1698 log.debug('comment: forbidden because pull request is from forbidden repo')
1696 1699 raise HTTPForbidden()
1697 1700
1698 1701 comment_data = {
1699 1702 'comment_type': self.request.POST.get('comment_type'),
1700 1703 'text': self.request.POST.get('text'),
1701 1704 'status': self.request.POST.get('changeset_status', None),
1702 1705 'is_draft': self.request.POST.get('draft'),
1703 1706 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
1704 1707 'close_pull_request': self.request.POST.get('close_pull_request'),
1705 1708 'f_path': self.request.POST.get('f_path'),
1706 1709 'line': self.request.POST.get('line'),
1707 1710 }
1708 1711 data = self._pull_request_comments_create(pull_request, [comment_data])
1709 1712
1710 1713 return data
1711 1714
1712 1715 @LoginRequired()
1713 1716 @NotAnonymous()
1714 1717 @HasRepoPermissionAnyDecorator(
1715 1718 'repository.read', 'repository.write', 'repository.admin')
1716 1719 @CSRFRequired()
1717 1720 def pull_request_comment_delete(self):
1718 1721 pull_request = PullRequest.get_or_404(
1719 1722 self.request.matchdict['pull_request_id'])
1720 1723
1721 1724 comment = ChangesetComment.get_or_404(
1722 1725 self.request.matchdict['comment_id'])
1723 1726 comment_id = comment.comment_id
1724 1727
1725 1728 if comment.immutable:
1726 1729 # don't allow deleting comments that are immutable
1727 1730 raise HTTPForbidden()
1728 1731
1729 1732 if pull_request.is_closed():
1730 1733 log.debug('comment: forbidden because pull request is closed')
1731 1734 raise HTTPForbidden()
1732 1735
1733 1736 if not comment:
1734 1737 log.debug('Comment with id:%s not found, skipping', comment_id)
1735 1738 # comment already deleted in another call probably
1736 1739 return True
1737 1740
1738 1741 if comment.pull_request.is_closed():
1739 1742 # don't allow deleting comments on a closed pull request
1740 1743 raise HTTPForbidden()
1741 1744
1742 1745 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1743 1746 super_admin = h.HasPermissionAny('hg.admin')()
1744 1747 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1745 1748 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1746 1749 comment_repo_admin = is_repo_admin and is_repo_comment
1747 1750
1748 1751 if super_admin or comment_owner or comment_repo_admin:
1749 1752 old_calculated_status = comment.pull_request.calculated_review_status()
1750 1753 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1751 1754 Session().commit()
1752 1755 calculated_status = comment.pull_request.calculated_review_status()
1753 1756 if old_calculated_status != calculated_status:
1754 1757 PullRequestModel().trigger_pull_request_hook(
1755 1758 comment.pull_request, self._rhodecode_user, 'review_status_change',
1756 1759 data={'status': calculated_status})
1757 1760 return True
1758 1761 else:
1759 1762 log.warning('No permissions for user %s to delete comment_id: %s',
1760 1763 self._rhodecode_db_user, comment_id)
1761 1764 raise HTTPNotFound()
1762 1765
1763 1766 @LoginRequired()
1764 1767 @NotAnonymous()
1765 1768 @HasRepoPermissionAnyDecorator(
1766 1769 'repository.read', 'repository.write', 'repository.admin')
1767 1770 @CSRFRequired()
1768 1771 def pull_request_comment_edit(self):
1769 1772 self.load_default_context()
1770 1773
1771 1774 pull_request = PullRequest.get_or_404(
1772 1775 self.request.matchdict['pull_request_id']
1773 1776 )
1774 1777 comment = ChangesetComment.get_or_404(
1775 1778 self.request.matchdict['comment_id']
1776 1779 )
1777 1780 comment_id = comment.comment_id
1778 1781
1779 1782 if comment.immutable:
1780 1783 # don't allow editing comments that are immutable
1781 1784 raise HTTPForbidden()
1782 1785
1783 1786 if pull_request.is_closed():
1784 1787 log.debug('comment: forbidden because pull request is closed')
1785 1788 raise HTTPForbidden()
1786 1789
1787 1790 if comment.pull_request.is_closed():
1788 1791 # don't allow deleting comments on closed pull request
1789 1792 raise HTTPForbidden()
1790 1793
1791 1794 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1792 1795 super_admin = h.HasPermissionAny('hg.admin')()
1793 1796 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1794 1797 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1795 1798 comment_repo_admin = is_repo_admin and is_repo_comment
1796 1799
1797 1800 if super_admin or comment_owner or comment_repo_admin:
1798 1801 text = self.request.POST.get('text')
1799 1802 version = self.request.POST.get('version')
1800 1803 if text == comment.text:
1801 1804 log.warning(
1802 1805 'Comment(PR): '
1803 1806 'Trying to create a new version '
1804 1807 'with the same comment body for comment {}'.format(
1805 1808 comment_id,
1806 1809 )
1807 1810 )
1808 1811 raise HTTPNotFound()
1809 1812
1810 1813 if version.isdigit():
1811 1814 version = int(version)
1812 1815 else:
1813 1816 log.warning(
1814 1817 'Comment(PR): Wrong version type {} {} '
1815 1818 'for comment {}'.format(
1816 1819 version,
1817 1820 type(version),
1818 1821 comment_id,
1819 1822 )
1820 1823 )
1821 1824 raise HTTPNotFound()
1822 1825
1823 1826 try:
1824 1827 comment_history = CommentsModel().edit(
1825 1828 comment_id=comment_id,
1826 1829 text=text,
1827 1830 auth_user=self._rhodecode_user,
1828 1831 version=version,
1829 1832 )
1830 1833 except CommentVersionMismatch:
1831 1834 raise HTTPConflict()
1832 1835
1833 1836 if not comment_history:
1834 1837 raise HTTPNotFound()
1835 1838
1836 1839 Session().commit()
1837 1840 if not comment.draft:
1838 1841 PullRequestModel().trigger_pull_request_hook(
1839 1842 pull_request, self._rhodecode_user, 'comment_edit',
1840 1843 data={'comment': comment})
1841 1844
1842 1845 return {
1843 1846 'comment_history_id': comment_history.comment_history_id,
1844 1847 'comment_id': comment.comment_id,
1845 1848 'comment_version': comment_history.version,
1846 1849 'comment_author_username': comment_history.author.username,
1847 1850 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1848 1851 'comment_created_on': h.age_component(comment_history.created_on,
1849 1852 time_is_local=True),
1850 1853 }
1851 1854 else:
1852 1855 log.warning('No permissions for user %s to edit comment_id: %s',
1853 1856 self._rhodecode_db_user, comment_id)
1854 1857 raise HTTPNotFound()
@@ -1,5826 +1,5830 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import (
60 60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 61 from rhodecode.lib.utils2 import (
62 62 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
63 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
65 65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
66 66 JsonRaw
67 67 from rhodecode.lib.ext_json import json
68 68 from rhodecode.lib.caching_query import FromCache
69 69 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
70 70 from rhodecode.lib.encrypt2 import Encryptor
71 71 from rhodecode.lib.exceptions import (
72 72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
73 73 from rhodecode.model.meta import Base, Session
74 74
75 75 URL_SEP = '/'
76 76 log = logging.getLogger(__name__)
77 77
78 78 # =============================================================================
79 79 # BASE CLASSES
80 80 # =============================================================================
81 81
82 82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
83 83 # beaker.session.secret if first is not set.
84 84 # and initialized at environment.py
85 85 ENCRYPTION_KEY = None
86 86
87 87 # used to sort permissions by types, '#' used here is not allowed to be in
88 88 # usernames, and it's very early in sorted string.printable table.
89 89 PERMISSION_TYPE_SORT = {
90 90 'admin': '####',
91 91 'write': '###',
92 92 'read': '##',
93 93 'none': '#',
94 94 }
95 95
96 96
97 97 def display_user_sort(obj):
98 98 """
99 99 Sort function used to sort permissions in .permissions() function of
100 100 Repository, RepoGroup, UserGroup. Also it puts the default user in front
101 101 of all other resources
102 102 """
103 103
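# illustrative sort keys produced below (assuming an admin permission and
# username 'bob'): 'repository.admin' -> '####1bob', while the default
# user always sorts first with the fixed key '#####'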
104 104 if obj.username == User.DEFAULT_USER:
105 105 return '#####'
106 106 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
107 107 extra_sort_num = '1' # default
108 108
109 109 # NOTE(dan): inactive duplicates go last
110 110 if getattr(obj, 'duplicate_perm', None):
111 111 extra_sort_num = '9'
112 112 return prefix + extra_sort_num + obj.username
113 113
114 114
115 115 def display_user_group_sort(obj):
116 116 """
117 117 Sort function used to sort permissions in .permissions() function of
118 118 Repository, RepoGroup, UserGroup. Also it puts the default user in front
119 119 of all other resources
120 120 """
121 121
122 122 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
123 123 return prefix + obj.users_group_name
124 124
125 125
126 126 def _hash_key(k):
127 127 return sha1_safe(k)
128 128
129 129
130 130 def in_filter_generator(qry, items, limit=500):
131 131 """
132 132 Splits a large IN() clause into multiple smaller IN() clauses combined with OR
133 133 e.g.::
134 134 cnt = Repository.query().filter(
135 135 or_(
136 136 *in_filter_generator(Repository.repo_id, range(100000))
137 137 )).count()
138 138 """
139 139 if not items:
140 140 # an empty list would cause an empty query, which might cause security issues;
141 141 # this can lead to hidden, unpleasant results
142 142 items = [-1]
143 143
144 144 parts = []
145 145 for chunk in xrange(0, len(items), limit):
146 146 parts.append(
147 147 qry.in_(items[chunk: chunk + limit])
148 148 )
149 149
150 150 return parts
151 151
152 152
153 153 base_table_args = {
154 154 'extend_existing': True,
155 155 'mysql_engine': 'InnoDB',
156 156 'mysql_charset': 'utf8',
157 157 'sqlite_autoincrement': True
158 158 }
159 159
160 160
161 161 class EncryptedTextValue(TypeDecorator):
162 162 """
163 163 Special column for encrypted long text data, use like::
164 164
165 165 value = Column("encrypted_value", EncryptedValue(), nullable=False)
166 166
167 167 This column is intelligent: if the value is in unencrypted form it returns
168 168 the unencrypted form, but on save it always encrypts
169 169 """
170 170 impl = Text
171 171
172 172 def process_bind_param(self, value, dialect):
173 173 """
174 174 Setter for storing value
175 175 """
176 176 import rhodecode
177 177 if not value:
178 178 return value
179 179
180 180 # protect against double encrypting if values is already encrypted
181 181 if value.startswith('enc$aes$') \
182 182 or value.startswith('enc$aes_hmac$') \
183 183 or value.startswith('enc2$'):
184 184 raise ValueError('value needs to be in unencrypted format, '
185 185 'ie. not starting with enc$ or enc2$')
186 186
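# stored values are prefixed with the scheme, e.g. 'enc$aes_hmac$<ciphertext>'
# for the default aes algorithm (fernet values presumably use the 'enc2$'
# prefix handled by Encryptor, as guarded against above)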
187 187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 188 if algo == 'aes':
189 189 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
190 190 elif algo == 'fernet':
191 191 return Encryptor(ENCRYPTION_KEY).encrypt(value)
192 192 else:
192 192 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
194 194
195 195 def process_result_value(self, value, dialect):
196 196 """
197 197 Getter for retrieving value
198 198 """
199 199
200 200 import rhodecode
201 201 if not value:
202 202 return value
203 203
204 204 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
205 205 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
206 206 if algo == 'aes':
207 207 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
208 208 elif algo == 'fernet':
209 209 return Encryptor(ENCRYPTION_KEY).decrypt(value)
210 210 else:
211 211 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
212 212 return decrypted_data
213 213
214 214
215 215 class BaseModel(object):
216 216 """
217 217 Base Model for all classes
218 218 """
219 219
220 220 @classmethod
221 221 def _get_keys(cls):
222 222 """return column names for this model """
223 223 return class_mapper(cls).c.keys()
224 224
225 225 def get_dict(self):
226 226 """
227 227 return dict with keys and values corresponding
228 228 to this model data """
229 229
230 230 d = {}
231 231 for k in self._get_keys():
232 232 d[k] = getattr(self, k)
233 233
234 234 # also use __json__() if present to get additional fields
235 235 _json_attr = getattr(self, '__json__', None)
236 236 if _json_attr:
237 237 # update with attributes from __json__
238 238 if callable(_json_attr):
239 239 _json_attr = _json_attr()
240 240 for k, val in _json_attr.iteritems():
241 241 d[k] = val
242 242 return d
243 243
244 244 def get_appstruct(self):
245 245 """return list with keys and values tuples corresponding
246 246 to this model data """
247 247
248 248 lst = []
249 249 for k in self._get_keys():
250 250 lst.append((k, getattr(self, k),))
251 251 return lst
252 252
253 253 def populate_obj(self, populate_dict):
254 254 """populate model with data from given populate_dict"""
255 255
256 256 for k in self._get_keys():
257 257 if k in populate_dict:
258 258 setattr(self, k, populate_dict[k])
259 259
260 260 @classmethod
261 261 def query(cls):
262 262 return Session().query(cls)
263 263
264 264 @classmethod
265 265 def get(cls, id_):
266 266 if id_:
267 267 return cls.query().get(id_)
268 268
269 269 @classmethod
270 270 def get_or_404(cls, id_):
271 271 from pyramid.httpexceptions import HTTPNotFound
272 272
273 273 try:
274 274 id_ = int(id_)
275 275 except (TypeError, ValueError):
276 276 raise HTTPNotFound()
277 277
278 278 res = cls.query().get(id_)
279 279 if not res:
280 280 raise HTTPNotFound()
281 281 return res
282 282
283 283 @classmethod
284 284 def getAll(cls):
285 285 # deprecated and left for backward compatibility
286 286 return cls.get_all()
287 287
288 288 @classmethod
289 289 def get_all(cls):
290 290 return cls.query().all()
291 291
292 292 @classmethod
293 293 def delete(cls, id_):
294 294 obj = cls.query().get(id_)
295 295 Session().delete(obj)
296 296
297 297 @classmethod
298 298 def identity_cache(cls, session, attr_name, value):
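# scan the SQLAlchemy session identity map for an already-loaded instance
# whose `attr_name` equals `value`, instead of issuing a fresh query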
299 299 exist_in_session = []
300 300 for (item_cls, pkey), instance in session.identity_map.items():
301 301 if cls == item_cls and getattr(instance, attr_name) == value:
302 302 exist_in_session.append(instance)
303 303 if exist_in_session:
304 304 if len(exist_in_session) == 1:
305 305 return exist_in_session[0]
306 306 log.exception(
307 307 'multiple objects with attr %s and '
308 308 'value %s found with same name: %r',
309 309 attr_name, value, exist_in_session)
310 310
311 311 def __repr__(self):
312 312 if hasattr(self, '__unicode__'):
313 313 # python repr needs to return str
314 314 try:
315 315 return safe_str(self.__unicode__())
316 316 except UnicodeDecodeError:
317 317 pass
318 318 return '<DB:%s>' % self.__class__.__name__
319 319
320 320
321 321 class RhodeCodeSetting(Base, BaseModel):
322 322 __tablename__ = 'rhodecode_settings'
323 323 __table_args__ = (
324 324 UniqueConstraint('app_settings_name'),
325 325 base_table_args
326 326 )
327 327
328 328 SETTINGS_TYPES = {
329 329 'str': safe_str,
330 330 'int': safe_int,
331 331 'unicode': safe_unicode,
332 332 'bool': str2bool,
333 333 'list': functools.partial(aslist, sep=',')
334 334 }
335 335 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
336 336 GLOBAL_CONF_KEY = 'app_settings'
337 337
338 338 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
339 339 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
340 340 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
341 341 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
342 342
343 343 def __init__(self, key='', val='', type='unicode'):
344 344 self.app_settings_name = key
345 345 self.app_settings_type = type
346 346 self.app_settings_value = val
347 347
348 348 @validates('_app_settings_value')
349 349 def validate_settings_value(self, key, val):
350 350 assert type(val) == unicode
351 351 return val
352 352
353 353 @hybrid_property
354 354 def app_settings_value(self):
355 355 v = self._app_settings_value
356 356 _type = self.app_settings_type
357 357 if _type:
358 358 _type = self.app_settings_type.split('.')[0]
359 359 # decode the encrypted value
360 360 if 'encrypted' in self.app_settings_type:
361 361 cipher = EncryptedTextValue()
362 362 v = safe_unicode(cipher.process_result_value(v, None))
363 363
364 364 converter = self.SETTINGS_TYPES.get(_type) or \
365 365 self.SETTINGS_TYPES['unicode']
366 366 return converter(v)
367 367
368 368 @app_settings_value.setter
369 369 def app_settings_value(self, val):
370 370 """
371 371 Setter that will always make sure we use unicode in app_settings_value
372 372
373 373 :param val:
374 374 """
375 375 val = safe_unicode(val)
376 376 # encode the encrypted value
377 377 if 'encrypted' in self.app_settings_type:
378 378 cipher = EncryptedTextValue()
379 379 val = safe_unicode(cipher.process_bind_param(val, None))
380 380 self._app_settings_value = val
381 381
382 382 @hybrid_property
383 383 def app_settings_type(self):
384 384 return self._app_settings_type
385 385
386 386 @app_settings_type.setter
387 387 def app_settings_type(self, val):
388 388 if val.split('.')[0] not in self.SETTINGS_TYPES:
389 389 raise Exception('type must be one of %s got %s'
390 390 % (self.SETTINGS_TYPES.keys(), val))
391 391 self._app_settings_type = val
392 392
393 393 @classmethod
394 394 def get_by_prefix(cls, prefix):
395 395 return RhodeCodeSetting.query()\
396 396 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
397 397 .all()
398 398
399 399 def __unicode__(self):
400 400 return u"<%s('%s:%s[%s]')>" % (
401 401 self.__class__.__name__,
402 402 self.app_settings_name, self.app_settings_value,
403 403 self.app_settings_type
404 404 )
405 405
406 406
407 407 class RhodeCodeUi(Base, BaseModel):
408 408 __tablename__ = 'rhodecode_ui'
409 409 __table_args__ = (
410 410 UniqueConstraint('ui_key'),
411 411 base_table_args
412 412 )
413 413
414 414 HOOK_REPO_SIZE = 'changegroup.repo_size'
415 415 # HG
416 416 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
417 417 HOOK_PULL = 'outgoing.pull_logger'
418 418 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
419 419 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
420 420 HOOK_PUSH = 'changegroup.push_logger'
421 421 HOOK_PUSH_KEY = 'pushkey.key_push'
422 422
423 423 HOOKS_BUILTIN = [
424 424 HOOK_PRE_PULL,
425 425 HOOK_PULL,
426 426 HOOK_PRE_PUSH,
427 427 HOOK_PRETX_PUSH,
428 428 HOOK_PUSH,
429 429 HOOK_PUSH_KEY,
430 430 ]
431 431
432 432 # TODO: johbo: Unify way how hooks are configured for git and hg,
433 433 # git part is currently hardcoded.
434 434
435 435 # SVN PATTERNS
436 436 SVN_BRANCH_ID = 'vcs_svn_branch'
437 437 SVN_TAG_ID = 'vcs_svn_tag'
438 438
439 439 ui_id = Column(
440 440 "ui_id", Integer(), nullable=False, unique=True, default=None,
441 441 primary_key=True)
442 442 ui_section = Column(
443 443 "ui_section", String(255), nullable=True, unique=None, default=None)
444 444 ui_key = Column(
445 445 "ui_key", String(255), nullable=True, unique=None, default=None)
446 446 ui_value = Column(
447 447 "ui_value", String(255), nullable=True, unique=None, default=None)
448 448 ui_active = Column(
449 449 "ui_active", Boolean(), nullable=True, unique=None, default=True)
450 450
451 451 def __repr__(self):
452 452 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
453 453 self.ui_key, self.ui_value)
454 454
455 455
456 456 class RepoRhodeCodeSetting(Base, BaseModel):
457 457 __tablename__ = 'repo_rhodecode_settings'
458 458 __table_args__ = (
459 459 UniqueConstraint(
460 460 'app_settings_name', 'repository_id',
461 461 name='uq_repo_rhodecode_setting_name_repo_id'),
462 462 base_table_args
463 463 )
464 464
465 465 repository_id = Column(
466 466 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
467 467 nullable=False)
468 468 app_settings_id = Column(
469 469 "app_settings_id", Integer(), nullable=False, unique=True,
470 470 default=None, primary_key=True)
471 471 app_settings_name = Column(
472 472 "app_settings_name", String(255), nullable=True, unique=None,
473 473 default=None)
474 474 _app_settings_value = Column(
475 475 "app_settings_value", String(4096), nullable=True, unique=None,
476 476 default=None)
477 477 _app_settings_type = Column(
478 478 "app_settings_type", String(255), nullable=True, unique=None,
479 479 default=None)
480 480
481 481 repository = relationship('Repository')
482 482
483 483 def __init__(self, repository_id, key='', val='', type='unicode'):
484 484 self.repository_id = repository_id
485 485 self.app_settings_name = key
486 486 self.app_settings_type = type
487 487 self.app_settings_value = val
488 488
489 489 @validates('_app_settings_value')
490 490 def validate_settings_value(self, key, val):
491 491 assert type(val) == unicode
492 492 return val
493 493
494 494 @hybrid_property
495 495 def app_settings_value(self):
496 496 v = self._app_settings_value
497 497 type_ = self.app_settings_type
498 498 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
499 499 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
500 500 return converter(v)
501 501
502 502 @app_settings_value.setter
503 503 def app_settings_value(self, val):
504 504 """
505 505 Setter that will always make sure we use unicode in app_settings_value
506 506
507 507 :param val:
508 508 """
509 509 self._app_settings_value = safe_unicode(val)
510 510
511 511 @hybrid_property
512 512 def app_settings_type(self):
513 513 return self._app_settings_type
514 514
515 515 @app_settings_type.setter
516 516 def app_settings_type(self, val):
517 517 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
518 518 if val not in SETTINGS_TYPES:
519 519 raise Exception('type must be one of %s got %s'
520 520 % (SETTINGS_TYPES.keys(), val))
521 521 self._app_settings_type = val
522 522
523 523 def __unicode__(self):
524 524 return u"<%s('%s:%s:%s[%s]')>" % (
525 525 self.__class__.__name__, self.repository.repo_name,
526 526 self.app_settings_name, self.app_settings_value,
527 527 self.app_settings_type
528 528 )
529 529
530 530
531 531 class RepoRhodeCodeUi(Base, BaseModel):
532 532 __tablename__ = 'repo_rhodecode_ui'
533 533 __table_args__ = (
534 534 UniqueConstraint(
535 535 'repository_id', 'ui_section', 'ui_key',
536 536 name='uq_repo_rhodecode_ui_repository_id_section_key'),
537 537 base_table_args
538 538 )
539 539
540 540 repository_id = Column(
541 541 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
542 542 nullable=False)
543 543 ui_id = Column(
544 544 "ui_id", Integer(), nullable=False, unique=True, default=None,
545 545 primary_key=True)
546 546 ui_section = Column(
547 547 "ui_section", String(255), nullable=True, unique=None, default=None)
548 548 ui_key = Column(
549 549 "ui_key", String(255), nullable=True, unique=None, default=None)
550 550 ui_value = Column(
551 551 "ui_value", String(255), nullable=True, unique=None, default=None)
552 552 ui_active = Column(
553 553 "ui_active", Boolean(), nullable=True, unique=None, default=True)
554 554
555 555 repository = relationship('Repository')
556 556
557 557 def __repr__(self):
558 558 return '<%s[%s:%s]%s=>%s]>' % (
559 559 self.__class__.__name__, self.repository.repo_name,
560 560 self.ui_section, self.ui_key, self.ui_value)
561 561
562 562
563 563 class User(Base, BaseModel):
564 564 __tablename__ = 'users'
565 565 __table_args__ = (
566 566 UniqueConstraint('username'), UniqueConstraint('email'),
567 567 Index('u_username_idx', 'username'),
568 568 Index('u_email_idx', 'email'),
569 569 base_table_args
570 570 )
571 571
572 572 DEFAULT_USER = 'default'
573 573 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
574 574 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
575 575
576 576 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
577 577 username = Column("username", String(255), nullable=True, unique=None, default=None)
578 578 password = Column("password", String(255), nullable=True, unique=None, default=None)
579 579 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
580 580 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
581 581 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
582 582 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
583 583 _email = Column("email", String(255), nullable=True, unique=None, default=None)
584 584 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
585 585 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
586 586 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
587 587
588 588 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
589 589 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
590 590 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
591 591 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
592 592 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
593 593 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
594 594
595 595 user_log = relationship('UserLog')
596 596 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
597 597
598 598 repositories = relationship('Repository')
599 599 repository_groups = relationship('RepoGroup')
600 600 user_groups = relationship('UserGroup')
601 601
602 602 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
603 603 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
604 604
605 605 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
606 606 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
607 607 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
608 608
609 609 group_member = relationship('UserGroupMember', cascade='all')
610 610
611 611 notifications = relationship('UserNotification', cascade='all')
612 612 # notifications assigned to this user
613 613 user_created_notifications = relationship('Notification', cascade='all')
614 614 # comments created by this user
615 615 user_comments = relationship('ChangesetComment', cascade='all')
616 616 # user profile extra info
617 617 user_emails = relationship('UserEmailMap', cascade='all')
618 618 user_ip_map = relationship('UserIpMap', cascade='all')
619 619 user_auth_tokens = relationship('UserApiKeys', cascade='all')
620 620 user_ssh_keys = relationship('UserSshKeys', cascade='all')
621 621
622 622 # gists
623 623 user_gists = relationship('Gist', cascade='all')
624 624 # user pull requests
625 625 user_pull_requests = relationship('PullRequest', cascade='all')
626 626
627 627 # external identities
628 628 external_identities = relationship(
629 629 'ExternalIdentity',
630 630 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
631 631 cascade='all')
632 632 # review rules
633 633 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
634 634
635 635 # artifacts owned
636 636 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
637 637
638 638 # no cascade, set NULL
639 639 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
640 640
641 641 def __unicode__(self):
642 642 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
643 643 self.user_id, self.username)
644 644
645 645 @hybrid_property
646 646 def email(self):
647 647 return self._email
648 648
649 649 @email.setter
650 650 def email(self, val):
651 651 self._email = val.lower() if val else None
652 652
653 653 @hybrid_property
654 654 def first_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.name:
657 657 return h.escape(self.name)
658 658 return self.name
659 659
660 660 @hybrid_property
661 661 def last_name(self):
662 662 from rhodecode.lib import helpers as h
663 663 if self.lastname:
664 664 return h.escape(self.lastname)
665 665 return self.lastname
666 666
667 667 @hybrid_property
668 668 def api_key(self):
669 669 """
670 670 Fetch an auth-token with role ALL connected to this user, if one exists
671 671 """
672 672 user_auth_token = UserApiKeys.query()\
673 673 .filter(UserApiKeys.user_id == self.user_id)\
674 674 .filter(or_(UserApiKeys.expires == -1,
675 675 UserApiKeys.expires >= time.time()))\
676 676 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
677 677 if user_auth_token:
678 678 user_auth_token = user_auth_token.api_key
679 679
680 680 return user_auth_token
681 681
682 682 @api_key.setter
683 683 def api_key(self, val):
684 684 # don't allow setting the API key; this is deprecated for now
685 685 self._api_key = None
686 686
687 687 @property
688 688 def reviewer_pull_requests(self):
689 689 return PullRequestReviewers.query() \
690 690 .options(joinedload(PullRequestReviewers.pull_request)) \
691 691 .filter(PullRequestReviewers.user_id == self.user_id) \
692 692 .all()
693 693
694 694 @property
695 695 def firstname(self):
696 696 # alias for future
697 697 return self.name
698 698
699 699 @property
700 700 def emails(self):
701 701 other = UserEmailMap.query()\
702 702 .filter(UserEmailMap.user == self) \
703 703 .order_by(UserEmailMap.email_id.asc()) \
704 704 .all()
705 705 return [self.email] + [x.email for x in other]
706 706
707 707 def emails_cached(self):
708 708 emails = UserEmailMap.query()\
709 709 .filter(UserEmailMap.user == self) \
710 710 .order_by(UserEmailMap.email_id.asc())
711 711
712 712 emails = emails.options(
713 713 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
714 714 )
715 715
716 716 return [self.email] + [x.email for x in emails]
717 717
718 718 @property
719 719 def auth_tokens(self):
720 720 auth_tokens = self.get_auth_tokens()
721 721 return [x.api_key for x in auth_tokens]
722 722
723 723 def get_auth_tokens(self):
724 724 return UserApiKeys.query()\
725 725 .filter(UserApiKeys.user == self)\
726 726 .order_by(UserApiKeys.user_api_key_id.asc())\
727 727 .all()
728 728
729 729 @LazyProperty
730 730 def feed_token(self):
731 731 return self.get_feed_token()
732 732
733 733 def get_feed_token(self, cache=True):
734 734 feed_tokens = UserApiKeys.query()\
735 735 .filter(UserApiKeys.user == self)\
736 736 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
737 737 if cache:
738 738 feed_tokens = feed_tokens.options(
739 739 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
740 740
741 741 feed_tokens = feed_tokens.all()
742 742 if feed_tokens:
743 743 return feed_tokens[0].api_key
744 744 return 'NO_FEED_TOKEN_AVAILABLE'
745 745
746 746 @LazyProperty
747 747 def artifact_token(self):
748 748 return self.get_artifact_token()
749 749
750 750 def get_artifact_token(self, cache=True):
751 751 artifacts_tokens = UserApiKeys.query()\
752 752 .filter(UserApiKeys.user == self) \
753 753 .filter(or_(UserApiKeys.expires == -1,
754 754 UserApiKeys.expires >= time.time())) \
755 755 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
756 756
757 757 if cache:
758 758 artifacts_tokens = artifacts_tokens.options(
759 759 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
760 760
761 761 artifacts_tokens = artifacts_tokens.all()
762 762 if artifacts_tokens:
763 763 return artifacts_tokens[0].api_key
764 764 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
765 765
766 766 def get_or_create_artifact_token(self):
767 767 artifacts_tokens = UserApiKeys.query()\
768 768 .filter(UserApiKeys.user == self) \
769 769 .filter(or_(UserApiKeys.expires == -1,
770 770 UserApiKeys.expires >= time.time())) \
771 771 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
772 772
773 773 artifacts_tokens = artifacts_tokens.all()
774 774 if artifacts_tokens:
775 775 return artifacts_tokens[0].api_key
776 776 else:
777 777 from rhodecode.model.auth_token import AuthTokenModel
778 778 artifact_token = AuthTokenModel().create(
779 779 self, 'auto-generated-artifact-token',
780 780 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
781 781 Session.commit()
782 782 return artifact_token.api_key
783 783
784 784 @classmethod
785 785 def get(cls, user_id, cache=False):
786 786 if not user_id:
787 787 return
788 788
789 789 user = cls.query()
790 790 if cache:
791 791 user = user.options(
792 792 FromCache("sql_cache_short", "get_users_%s" % user_id))
793 793 return user.get(user_id)
794 794
795 795 @classmethod
796 796 def extra_valid_auth_tokens(cls, user, role=None):
797 797 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
798 798 .filter(or_(UserApiKeys.expires == -1,
799 799 UserApiKeys.expires >= time.time()))
800 800 if role:
801 801 tokens = tokens.filter(or_(UserApiKeys.role == role,
802 802 UserApiKeys.role == UserApiKeys.ROLE_ALL))
803 803 return tokens.all()
804 804
805 805 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
806 806 from rhodecode.lib import auth
807 807
808 808 log.debug('Trying to authenticate user: %s via auth-token, '
809 809 'and roles: %s', self, roles)
810 810
811 811 if not auth_token:
812 812 return False
813 813
814 814 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
815 815 tokens_q = UserApiKeys.query()\
816 816 .filter(UserApiKeys.user_id == self.user_id)\
817 817 .filter(or_(UserApiKeys.expires == -1,
818 818 UserApiKeys.expires >= time.time()))
819 819
820 820 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
821 821
822 822 crypto_backend = auth.crypto_backend()
823 823 enc_token_map = {}
824 824 plain_token_map = {}
825 825 for token in tokens_q:
826 826 if token.api_key.startswith(crypto_backend.ENC_PREF):
827 827 enc_token_map[token.api_key] = token
828 828 else:
829 829 plain_token_map[token.api_key] = token
830 830 log.debug(
831 831 'Found %s plain and %s encrypted tokens to check for authentication for this user',
832 832 len(plain_token_map), len(enc_token_map))
833 833
834 834 # plain token match comes first
835 835 match = plain_token_map.get(auth_token)
836 836
837 837 # check encrypted tokens now
838 838 if not match:
839 839 for token_hash, token in enc_token_map.items():
840 840 # NOTE(marcink): this is expensive to calculate, but most secure
841 841 if crypto_backend.hash_check(auth_token, token_hash):
842 842 match = token
843 843 break
844 844
845 845 if match:
846 846 log.debug('Found matching token %s', match)
847 847 if match.repo_id:
848 848 log.debug('Found scope, checking for scope match of token %s', match)
849 849 if match.repo_id == scope_repo_id:
850 850 return True
851 851 else:
852 852 log.debug(
853 853 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
854 854 'and calling scope is: %s, skipping further checks',
855 855 match.repo, scope_repo_id)
856 856 return False
857 857 else:
858 858 return True
859 859
860 860 return False
861 861
862 862 @property
863 863 def ip_addresses(self):
864 864 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
865 865 return [x.ip_addr for x in ret]
866 866
867 867 @property
868 868 def username_and_name(self):
869 869 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
870 870
871 871 @property
872 872 def username_or_name_or_email(self):
873 873 full_name = self.full_name if self.full_name != ' ' else None
874 874 return self.username or full_name or self.email
875 875
876 876 @property
877 877 def full_name(self):
878 878 return '%s %s' % (self.first_name, self.last_name)
879 879
880 880 @property
881 881 def full_name_or_username(self):
882 882 return ('%s %s' % (self.first_name, self.last_name)
883 883 if (self.first_name and self.last_name) else self.username)
884 884
885 885 @property
886 886 def full_contact(self):
887 887 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
888 888
889 889 @property
890 890 def short_contact(self):
891 891 return '%s %s' % (self.first_name, self.last_name)
892 892
893 893 @property
894 894 def is_admin(self):
895 895 return self.admin
896 896
897 897 @property
898 898 def language(self):
899 899 return self.user_data.get('language')
900 900
901 901 def AuthUser(self, **kwargs):
902 902 """
903 903 Returns instance of AuthUser for this user
904 904 """
905 905 from rhodecode.lib.auth import AuthUser
906 906 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
907 907
908 908 @hybrid_property
909 909 def user_data(self):
910 910 if not self._user_data:
911 911 return {}
912 912
913 913 try:
914 914 return json.loads(self._user_data)
915 915 except TypeError:
916 916 return {}
917 917
918 918 @user_data.setter
919 919 def user_data(self, val):
920 920 if not isinstance(val, dict):
921 921 raise Exception('user_data must be dict, got %s' % type(val))
922 922 try:
923 923 self._user_data = json.dumps(val)
924 924 except Exception:
925 925 log.error(traceback.format_exc())
926 926
927 927 @classmethod
928 928 def get_by_username(cls, username, case_insensitive=False,
929 929 cache=False, identity_cache=False):
930 930 session = Session()
931 931
932 932 if case_insensitive:
933 933 q = cls.query().filter(
934 934 func.lower(cls.username) == func.lower(username))
935 935 else:
936 936 q = cls.query().filter(cls.username == username)
937 937
938 938 if cache:
939 939 if identity_cache:
940 940 val = cls.identity_cache(session, 'username', username)
941 941 if val:
942 942 return val
943 943 else:
944 944 cache_key = "get_user_by_name_%s" % _hash_key(username)
945 945 q = q.options(
946 946 FromCache("sql_cache_short", cache_key))
947 947
948 948 return q.scalar()
949 949
950 950 @classmethod
951 951 def get_by_auth_token(cls, auth_token, cache=False):
952 952 q = UserApiKeys.query()\
953 953 .filter(UserApiKeys.api_key == auth_token)\
954 954 .filter(or_(UserApiKeys.expires == -1,
955 955 UserApiKeys.expires >= time.time()))
956 956 if cache:
957 957 q = q.options(
958 958 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
959 959
960 960 match = q.first()
961 961 if match:
962 962 return match.user
963 963
964 964 @classmethod
965 965 def get_by_email(cls, email, case_insensitive=False, cache=False):
966 966
967 967 if case_insensitive:
968 968 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
969 969
970 970 else:
971 971 q = cls.query().filter(cls.email == email)
972 972
973 973 email_key = _hash_key(email)
974 974 if cache:
975 975 q = q.options(
976 976 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
977 977
978 978 ret = q.scalar()
979 979 if ret is None:
980 980 q = UserEmailMap.query()
981 981 # try fetching in alternate email map
982 982 if case_insensitive:
983 983 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
984 984 else:
985 985 q = q.filter(UserEmailMap.email == email)
986 986 q = q.options(joinedload(UserEmailMap.user))
987 987 if cache:
988 988 q = q.options(
989 989 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
990 990 ret = getattr(q.scalar(), 'user', None)
991 991
992 992 return ret
993 993
994 994 @classmethod
995 995 def get_from_cs_author(cls, author):
996 996 """
997 997 Tries to get User objects out of commit author string
998 998
999 999 :param author:
1000 1000 """
1001 1001 from rhodecode.lib.helpers import email, author_name
1002 1002 # if the author string contains a valid email, see if it's in the system
1003 1003 _email = email(author)
1004 1004 if _email:
1005 1005 user = cls.get_by_email(_email, case_insensitive=True)
1006 1006 if user:
1007 1007 return user
1008 1008 # Maybe we can match by username?
1009 1009 _author = author_name(author)
1010 1010 user = cls.get_by_username(_author, case_insensitive=True)
1011 1011 if user:
1012 1012 return user
1013 1013
1014 1014 def update_userdata(self, **kwargs):
1015 1015 usr = self
1016 1016 old = usr.user_data
1017 1017 old.update(**kwargs)
1018 1018 usr.user_data = old
1019 1019 Session().add(usr)
1020 1020 log.debug('updated userdata with %s', kwargs)
1021 1021
1022 1022 def update_lastlogin(self):
1023 1023 """Update user lastlogin"""
1024 1024 self.last_login = datetime.datetime.now()
1025 1025 Session().add(self)
1026 1026 log.debug('updated user %s lastlogin', self.username)
1027 1027
1028 1028 def update_password(self, new_password):
1029 1029 from rhodecode.lib.auth import get_crypt_password
1030 1030
1031 1031 self.password = get_crypt_password(new_password)
1032 1032 Session().add(self)
1033 1033
1034 1034 @classmethod
1035 1035 def get_first_super_admin(cls):
1036 1036 user = User.query()\
1037 1037 .filter(User.admin == true()) \
1038 1038 .order_by(User.user_id.asc()) \
1039 1039 .first()
1040 1040
1041 1041 if user is None:
1042 1042 raise Exception('FATAL: Missing administrative account!')
1043 1043 return user
1044 1044
1045 1045 @classmethod
1046 1046 def get_all_super_admins(cls, only_active=False):
1047 1047 """
1048 1048 Returns all admin accounts sorted by username
1049 1049 """
1050 1050 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1051 1051 if only_active:
1052 1052 qry = qry.filter(User.active == true())
1053 1053 return qry.all()
1054 1054
1055 1055 @classmethod
1056 1056 def get_all_user_ids(cls, only_active=True):
1057 1057 """
1058 1058 Returns all user IDs
1059 1059 """
1060 1060 qry = Session().query(User.user_id)
1061 1061
1062 1062 if only_active:
1063 1063 qry = qry.filter(User.active == true())
1064 1064 return [x.user_id for x in qry]
1065 1065
1066 1066 @classmethod
1067 1067 def get_default_user(cls, cache=False, refresh=False):
1068 1068 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1069 1069 if user is None:
1070 1070 raise Exception('FATAL: Missing default account!')
1071 1071 if refresh:
1072 1072 # The default user might be based on outdated state which
1073 1073 # has been loaded from the cache.
1074 1074 # A call to refresh() ensures that the
1075 1075 # latest state from the database is used.
1076 1076 Session().refresh(user)
1077 1077 return user
1078 1078
1079 1079 @classmethod
1080 1080 def get_default_user_id(cls):
1081 1081 import rhodecode
1082 1082 return rhodecode.CONFIG['default_user_id']
1083 1083
1084 1084 def _get_default_perms(self, user, suffix=''):
1085 1085 from rhodecode.model.permission import PermissionModel
1086 1086 return PermissionModel().get_default_perms(user.user_perms, suffix)
1087 1087
1088 1088 def get_default_perms(self, suffix=''):
1089 1089 return self._get_default_perms(self, suffix)
1090 1090
1091 1091 def get_api_data(self, include_secrets=False, details='full'):
1092 1092 """
1093 1093 Common function for generating user related data for API
1094 1094
1095 1095 :param include_secrets: By default secrets in the API data will be replaced
1096 1096 by a placeholder value to prevent exposing this data by accident. In case
1097 1097 this data shall be exposed, set this flag to ``True``.
1098 1098
1099 1099 :param details: either 'basic' or 'full'. 'basic' gives only a subset of
1100 1100 the available user information that includes user_id, name and emails.
1101 1101 """
1102 1102 user = self
1103 1103 user_data = self.user_data
1104 1104 data = {
1105 1105 'user_id': user.user_id,
1106 1106 'username': user.username,
1107 1107 'firstname': user.name,
1108 1108 'lastname': user.lastname,
1109 1109 'description': user.description,
1110 1110 'email': user.email,
1111 1111 'emails': user.emails,
1112 1112 }
1113 1113 if details == 'basic':
1114 1114 return data
1115 1115
1116 1116 auth_token_length = 40
1117 1117 auth_token_replacement = '*' * auth_token_length
1118 1118
1119 1119 extras = {
1120 1120 'auth_tokens': [auth_token_replacement],
1121 1121 'active': user.active,
1122 1122 'admin': user.admin,
1123 1123 'extern_type': user.extern_type,
1124 1124 'extern_name': user.extern_name,
1125 1125 'last_login': user.last_login,
1126 1126 'last_activity': user.last_activity,
1127 1127 'ip_addresses': user.ip_addresses,
1128 1128 'language': user_data.get('language')
1129 1129 }
1130 1130 data.update(extras)
1131 1131
1132 1132 if include_secrets:
1133 1133 data['auth_tokens'] = user.auth_tokens
1134 1134 return data
1135 1135
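# Illustrative sketch (not part of the original module): the difference between
# the 'basic' and 'full' detail levels. The keys listed reflect the dictionaries
# built above; the user object itself is hypothetical.
#
#   basic = some_user.get_api_data(details='basic')
#   # -> user_id, username, firstname, lastname, description, email, emails
#   full = some_user.get_api_data()
#   # -> the basic keys plus auth_tokens (obfuscated unless include_secrets=True),
#   #    active, admin, extern_type, extern_name, last_login, last_activity,
#   #    ip_addresses and language
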
1136 1136 def __json__(self):
1137 1137 data = {
1138 1138 'full_name': self.full_name,
1139 1139 'full_name_or_username': self.full_name_or_username,
1140 1140 'short_contact': self.short_contact,
1141 1141 'full_contact': self.full_contact,
1142 1142 }
1143 1143 data.update(self.get_api_data())
1144 1144 return data
1145 1145
1146 1146
1147 1147 class UserApiKeys(Base, BaseModel):
1148 1148 __tablename__ = 'user_api_keys'
1149 1149 __table_args__ = (
1150 1150 Index('uak_api_key_idx', 'api_key'),
1151 1151 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1152 1152 base_table_args
1153 1153 )
1154 1154 __mapper_args__ = {}
1155 1155
1156 1156 # ApiKey role
1157 1157 ROLE_ALL = 'token_role_all'
1158 1158 ROLE_VCS = 'token_role_vcs'
1159 1159 ROLE_API = 'token_role_api'
1160 1160 ROLE_HTTP = 'token_role_http'
1161 1161 ROLE_FEED = 'token_role_feed'
1162 1162 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1163 1163 # The password-reset role below is not included in the ROLES list, as it is
1164 1164 # only used for a single action and cannot be created by users
1165 1165 ROLE_PASSWORD_RESET = 'token_password_reset'
1166 1166
1167 1167 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1168 1168
1169 1169 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1170 1170 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1171 1171 api_key = Column("api_key", String(255), nullable=False, unique=True)
1172 1172 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1173 1173 expires = Column('expires', Float(53), nullable=False)
1174 1174 role = Column('role', String(255), nullable=True)
1175 1175 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1176 1176
1177 1177 # scope columns
1178 1178 repo_id = Column(
1179 1179 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1180 1180 nullable=True, unique=None, default=None)
1181 1181 repo = relationship('Repository', lazy='joined')
1182 1182
1183 1183 repo_group_id = Column(
1184 1184 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1185 1185 nullable=True, unique=None, default=None)
1186 1186 repo_group = relationship('RepoGroup', lazy='joined')
1187 1187
1188 1188 user = relationship('User', lazy='joined')
1189 1189
1190 1190 def __unicode__(self):
1191 1191 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1192 1192
1193 1193 def __json__(self):
1194 1194 data = {
1195 1195 'auth_token': self.api_key,
1196 1196 'role': self.role,
1197 1197 'scope': self.scope_humanized,
1198 1198 'expired': self.expired
1199 1199 }
1200 1200 return data
1201 1201
1202 1202 def get_api_data(self, include_secrets=False):
1203 1203 data = self.__json__()
1204 1204 if include_secrets:
1205 1205 return data
1206 1206 else:
1207 1207 data['auth_token'] = self.token_obfuscated
1208 1208 return data
1209 1209
1210 1210 @hybrid_property
1211 1211 def description_safe(self):
1212 1212 from rhodecode.lib import helpers as h
1213 1213 return h.escape(self.description)
1214 1214
1215 1215 @property
1216 1216 def expired(self):
1217 1217 if self.expires == -1:
1218 1218 return False
1219 1219 return time.time() > self.expires
1220 1220
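# Illustrative note (not part of the original module): `expires` stores a unix
# timestamp, with -1 meaning the token never expires; the token object below is
# hypothetical.
#
#   token.expires = -1                  # never expires -> token.expired is False
#   token.expires = time.time() - 1     # already in the past -> token.expired is True
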
1221 1221 @classmethod
1222 1222 def _get_role_name(cls, role):
1223 1223 return {
1224 1224 cls.ROLE_ALL: _('all'),
1225 1225 cls.ROLE_HTTP: _('http/web interface'),
1226 1226 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1227 1227 cls.ROLE_API: _('api calls'),
1228 1228 cls.ROLE_FEED: _('feed access'),
1229 1229 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1230 1230 }.get(role, role)
1231 1231
1232 1232 @classmethod
1233 1233 def _get_role_description(cls, role):
1234 1234 return {
1235 1235 cls.ROLE_ALL: _('Token for all actions.'),
1236 1236 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1237 1237 'login using `api_access_controllers_whitelist` functionality.'),
1238 1238 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1239 1239 'Requires auth_token authentication plugin to be active. <br/>'
1240 1240 'Such a token should then be used instead of a password to '
1241 1241 'interact with a repository, and can additionally be '
1242 1242 'limited to a single repository using the repo scope.'),
1243 1243 cls.ROLE_API: _('Token limited to api calls.'),
1244 1244 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1245 1245 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1246 1246 }.get(role, role)
1247 1247
1248 1248 @property
1249 1249 def role_humanized(self):
1250 1250 return self._get_role_name(self.role)
1251 1251
1252 1252 def _get_scope(self):
1253 1253 if self.repo:
1254 1254 return 'Repository: {}'.format(self.repo.repo_name)
1255 1255 if self.repo_group:
1256 1256 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1257 1257 return 'Global'
1258 1258
1259 1259 @property
1260 1260 def scope_humanized(self):
1261 1261 return self._get_scope()
1262 1262
1263 1263 @property
1264 1264 def token_obfuscated(self):
1265 1265 if self.api_key:
1266 1266 return self.api_key[:4] + "****"
1267 1267
1268 1268
1269 1269 class UserEmailMap(Base, BaseModel):
1270 1270 __tablename__ = 'user_email_map'
1271 1271 __table_args__ = (
1272 1272 Index('uem_email_idx', 'email'),
1273 1273 UniqueConstraint('email'),
1274 1274 base_table_args
1275 1275 )
1276 1276 __mapper_args__ = {}
1277 1277
1278 1278 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1279 1279 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1280 1280 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1281 1281 user = relationship('User', lazy='joined')
1282 1282
1283 1283 @validates('_email')
1284 1284 def validate_email(self, key, email):
1285 1285 # check if this email is not main one
1286 1286 main_email = Session().query(User).filter(User.email == email).scalar()
1287 1287 if main_email is not None:
1288 1288 raise AttributeError('email %s is present in the user table' % email)
1289 1289 return email
1290 1290
1291 1291 @hybrid_property
1292 1292 def email(self):
1293 1293 return self._email
1294 1294
1295 1295 @email.setter
1296 1296 def email(self, val):
1297 1297 self._email = val.lower() if val else None
1298 1298
1299 1299
1300 1300 class UserIpMap(Base, BaseModel):
1301 1301 __tablename__ = 'user_ip_map'
1302 1302 __table_args__ = (
1303 1303 UniqueConstraint('user_id', 'ip_addr'),
1304 1304 base_table_args
1305 1305 )
1306 1306 __mapper_args__ = {}
1307 1307
1308 1308 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1309 1309 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1310 1310 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1311 1311 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1312 1312 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1313 1313 user = relationship('User', lazy='joined')
1314 1314
1315 1315 @hybrid_property
1316 1316 def description_safe(self):
1317 1317 from rhodecode.lib import helpers as h
1318 1318 return h.escape(self.description)
1319 1319
1320 1320 @classmethod
1321 1321 def _get_ip_range(cls, ip_addr):
1322 1322 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1323 1323 return [str(net.network_address), str(net.broadcast_address)]
1324 1324
1325 1325 def __json__(self):
1326 1326 return {
1327 1327 'ip_addr': self.ip_addr,
1328 1328 'ip_range': self._get_ip_range(self.ip_addr),
1329 1329 }
1330 1330
1331 1331 def __unicode__(self):
1332 1332 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1333 1333 self.user_id, self.ip_addr)
1334 1334
1335 1335
1336 1336 class UserSshKeys(Base, BaseModel):
1337 1337 __tablename__ = 'user_ssh_keys'
1338 1338 __table_args__ = (
1339 1339 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1340 1340
1341 1341 UniqueConstraint('ssh_key_fingerprint'),
1342 1342
1343 1343 base_table_args
1344 1344 )
1345 1345 __mapper_args__ = {}
1346 1346
1347 1347 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1348 1348 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1349 1349 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1350 1350
1351 1351 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1352 1352
1353 1353 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1354 1354 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1355 1355 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1356 1356
1357 1357 user = relationship('User', lazy='joined')
1358 1358
1359 1359 def __json__(self):
1360 1360 data = {
1361 1361 'ssh_fingerprint': self.ssh_key_fingerprint,
1362 1362 'description': self.description,
1363 1363 'created_on': self.created_on
1364 1364 }
1365 1365 return data
1366 1366
1367 1367 def get_api_data(self):
1368 1368 data = self.__json__()
1369 1369 return data
1370 1370
1371 1371
1372 1372 class UserLog(Base, BaseModel):
1373 1373 __tablename__ = 'user_logs'
1374 1374 __table_args__ = (
1375 1375 base_table_args,
1376 1376 )
1377 1377
1378 1378 VERSION_1 = 'v1'
1379 1379 VERSION_2 = 'v2'
1380 1380 VERSIONS = [VERSION_1, VERSION_2]
1381 1381
1382 1382 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1383 1383 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1384 1384 username = Column("username", String(255), nullable=True, unique=None, default=None)
1385 1385 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1386 1386 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1387 1387 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1388 1388 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1389 1389 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1390 1390
1391 1391 version = Column("version", String(255), nullable=True, default=VERSION_1)
1392 1392 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1393 1393 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1394 1394
1395 1395 def __unicode__(self):
1396 1396 return u"<%s('id:%s:%s')>" % (
1397 1397 self.__class__.__name__, self.repository_name, self.action)
1398 1398
1399 1399 def __json__(self):
1400 1400 return {
1401 1401 'user_id': self.user_id,
1402 1402 'username': self.username,
1403 1403 'repository_id': self.repository_id,
1404 1404 'repository_name': self.repository_name,
1405 1405 'user_ip': self.user_ip,
1406 1406 'action_date': self.action_date,
1407 1407 'action': self.action,
1408 1408 }
1409 1409
1410 1410 @hybrid_property
1411 1411 def entry_id(self):
1412 1412 return self.user_log_id
1413 1413
1414 1414 @property
1415 1415 def action_as_day(self):
1416 1416 return datetime.date(*self.action_date.timetuple()[:3])
1417 1417
1418 1418 user = relationship('User')
1419 1419 repository = relationship('Repository', cascade='')
1420 1420
1421 1421
1422 1422 class UserGroup(Base, BaseModel):
1423 1423 __tablename__ = 'users_groups'
1424 1424 __table_args__ = (
1425 1425 base_table_args,
1426 1426 )
1427 1427
1428 1428 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1429 1429 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1430 1430 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1431 1431 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1432 1432 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1433 1433 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1434 1434 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1435 1435 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1436 1436
1437 1437 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1438 1438 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1439 1439 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1440 1440 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1441 1441 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1442 1442 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1443 1443
1444 1444 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1445 1445 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1446 1446
1447 1447 @classmethod
1448 1448 def _load_group_data(cls, column):
1449 1449 if not column:
1450 1450 return {}
1451 1451
1452 1452 try:
1453 1453 return json.loads(column) or {}
1454 1454 except TypeError:
1455 1455 return {}
1456 1456
1457 1457 @hybrid_property
1458 1458 def description_safe(self):
1459 1459 from rhodecode.lib import helpers as h
1460 1460 return h.escape(self.user_group_description)
1461 1461
1462 1462 @hybrid_property
1463 1463 def group_data(self):
1464 1464 return self._load_group_data(self._group_data)
1465 1465
1466 1466 @group_data.expression
1467 1467 def group_data(self, **kwargs):
1468 1468 return self._group_data
1469 1469
1470 1470 @group_data.setter
1471 1471 def group_data(self, val):
1472 1472 try:
1473 1473 self._group_data = json.dumps(val)
1474 1474 except Exception:
1475 1475 log.error(traceback.format_exc())
1476 1476
1477 1477 @classmethod
1478 1478 def _load_sync(cls, group_data):
1479 1479 if group_data:
1480 1480 return group_data.get('extern_type')
1481 1481
1482 1482 @property
1483 1483 def sync(self):
1484 1484 return self._load_sync(self.group_data)
1485 1485
1486 1486 def __unicode__(self):
1487 1487 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1488 1488 self.users_group_id,
1489 1489 self.users_group_name)
1490 1490
1491 1491 @classmethod
1492 1492 def get_by_group_name(cls, group_name, cache=False,
1493 1493 case_insensitive=False):
1494 1494 if case_insensitive:
1495 1495 q = cls.query().filter(func.lower(cls.users_group_name) ==
1496 1496 func.lower(group_name))
1497 1497
1498 1498 else:
1499 1499 q = cls.query().filter(cls.users_group_name == group_name)
1500 1500 if cache:
1501 1501 q = q.options(
1502 1502 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1503 1503 return q.scalar()
1504 1504
1505 1505 @classmethod
1506 1506 def get(cls, user_group_id, cache=False):
1507 1507 if not user_group_id:
1508 1508 return
1509 1509
1510 1510 user_group = cls.query()
1511 1511 if cache:
1512 1512 user_group = user_group.options(
1513 1513 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1514 1514 return user_group.get(user_group_id)
1515 1515
1516 1516 def permissions(self, with_admins=True, with_owner=True,
1517 1517 expand_from_user_groups=False):
1518 1518 """
1519 1519 Permissions for user groups
1520 1520 """
1521 1521 _admin_perm = 'usergroup.admin'
1522 1522
1523 1523 owner_row = []
1524 1524 if with_owner:
1525 1525 usr = AttributeDict(self.user.get_dict())
1526 1526 usr.owner_row = True
1527 1527 usr.permission = _admin_perm
1528 1528 owner_row.append(usr)
1529 1529
1530 1530 super_admin_ids = []
1531 1531 super_admin_rows = []
1532 1532 if with_admins:
1533 1533 for usr in User.get_all_super_admins():
1534 1534 super_admin_ids.append(usr.user_id)
1535 1535 # if this admin is also owner, don't double the record
1536 1536 if usr.user_id == owner_row[0].user_id:
1537 1537 owner_row[0].admin_row = True
1538 1538 else:
1539 1539 usr = AttributeDict(usr.get_dict())
1540 1540 usr.admin_row = True
1541 1541 usr.permission = _admin_perm
1542 1542 super_admin_rows.append(usr)
1543 1543
1544 1544 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1545 1545 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1546 1546 joinedload(UserUserGroupToPerm.user),
1547 1547 joinedload(UserUserGroupToPerm.permission),)
1548 1548
1549 1549 # get owners, admins and their permissions. We re-write the sqlalchemy
1550 1550 # objects into plain AttributeDict copies because the sqlalchemy session
1551 1551 # holds a global reference, and changing one object would propagate to
1552 1552 # all others. If an admin is also the owner, setting admin_row would
1553 1553 # otherwise propagate to both objects
1554 1554 perm_rows = []
1555 1555 for _usr in q.all():
1556 1556 usr = AttributeDict(_usr.user.get_dict())
1557 1557 # if this user is also owner/admin, mark as duplicate record
1558 1558 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1559 1559 usr.duplicate_perm = True
1560 1560 usr.permission = _usr.permission.permission_name
1561 1561 perm_rows.append(usr)
1562 1562
1563 1563 # order the perm rows with the 'default' user first, then by
1564 1564 # admin, write, read, none permissions, sorted alphabetically within
1565 1565 # each group
1566 1566 perm_rows = sorted(perm_rows, key=display_user_sort)
1567 1567
1568 1568 user_groups_rows = []
1569 1569 if expand_from_user_groups:
1570 1570 for ug in self.permission_user_groups(with_members=True):
1571 1571 for user_data in ug.members:
1572 1572 user_groups_rows.append(user_data)
1573 1573
1574 1574 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1575 1575
1576 1576 def permission_user_groups(self, with_members=False):
1577 1577 q = UserGroupUserGroupToPerm.query()\
1578 1578 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1579 1579 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1580 1580 joinedload(UserGroupUserGroupToPerm.target_user_group),
1581 1581 joinedload(UserGroupUserGroupToPerm.permission),)
1582 1582
1583 1583 perm_rows = []
1584 1584 for _user_group in q.all():
1585 1585 entry = AttributeDict(_user_group.user_group.get_dict())
1586 1586 entry.permission = _user_group.permission.permission_name
1587 1587 if with_members:
1588 1588 entry.members = [x.user.get_dict()
1589 1589 for x in _user_group.user_group.members]
1590 1590 perm_rows.append(entry)
1591 1591
1592 1592 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1593 1593 return perm_rows
1594 1594
1595 1595 def _get_default_perms(self, user_group, suffix=''):
1596 1596 from rhodecode.model.permission import PermissionModel
1597 1597 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1598 1598
1599 1599 def get_default_perms(self, suffix=''):
1600 1600 return self._get_default_perms(self, suffix)
1601 1601
1602 1602 def get_api_data(self, with_group_members=True, include_secrets=False):
1603 1603 """
1604 1604 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1605 1605 basically forwarded.
1606 1606
1607 1607 """
1608 1608 user_group = self
1609 1609 data = {
1610 1610 'users_group_id': user_group.users_group_id,
1611 1611 'group_name': user_group.users_group_name,
1612 1612 'group_description': user_group.user_group_description,
1613 1613 'active': user_group.users_group_active,
1614 1614 'owner': user_group.user.username,
1615 1615 'sync': user_group.sync,
1616 1616 'owner_email': user_group.user.email,
1617 1617 }
1618 1618
1619 1619 if with_group_members:
1620 1620 users = []
1621 1621 for user in user_group.members:
1622 1622 user = user.user
1623 1623 users.append(user.get_api_data(include_secrets=include_secrets))
1624 1624 data['users'] = users
1625 1625
1626 1626 return data
1627 1627
1628 1628
1629 1629 class UserGroupMember(Base, BaseModel):
1630 1630 __tablename__ = 'users_groups_members'
1631 1631 __table_args__ = (
1632 1632 base_table_args,
1633 1633 )
1634 1634
1635 1635 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1636 1636 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1637 1637 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1638 1638
1639 1639 user = relationship('User', lazy='joined')
1640 1640 users_group = relationship('UserGroup')
1641 1641
1642 1642 def __init__(self, gr_id='', u_id=''):
1643 1643 self.users_group_id = gr_id
1644 1644 self.user_id = u_id
1645 1645
1646 1646
1647 1647 class RepositoryField(Base, BaseModel):
1648 1648 __tablename__ = 'repositories_fields'
1649 1649 __table_args__ = (
1650 1650 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1651 1651 base_table_args,
1652 1652 )
1653 1653
1654 1654 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1655 1655
1656 1656 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1657 1657 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1658 1658 field_key = Column("field_key", String(250))
1659 1659 field_label = Column("field_label", String(1024), nullable=False)
1660 1660 field_value = Column("field_value", String(10000), nullable=False)
1661 1661 field_desc = Column("field_desc", String(1024), nullable=False)
1662 1662 field_type = Column("field_type", String(255), nullable=False, unique=None)
1663 1663 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1664 1664
1665 1665 repository = relationship('Repository')
1666 1666
1667 1667 @property
1668 1668 def field_key_prefixed(self):
1669 1669 return 'ex_%s' % self.field_key
1670 1670
1671 1671 @classmethod
1672 1672 def un_prefix_key(cls, key):
1673 1673 if key.startswith(cls.PREFIX):
1674 1674 return key[len(cls.PREFIX):]
1675 1675 return key
1676 1676
1677 1677 @classmethod
1678 1678 def get_by_key_name(cls, key, repo):
1679 1679 row = cls.query()\
1680 1680 .filter(cls.repository == repo)\
1681 1681 .filter(cls.field_key == key).scalar()
1682 1682 return row
1683 1683
1684 1684
1685 1685 class Repository(Base, BaseModel):
1686 1686 __tablename__ = 'repositories'
1687 1687 __table_args__ = (
1688 1688 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1689 1689 base_table_args,
1690 1690 )
1691 1691 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1692 1692 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1693 1693 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1694 1694
1695 1695 STATE_CREATED = 'repo_state_created'
1696 1696 STATE_PENDING = 'repo_state_pending'
1697 1697 STATE_ERROR = 'repo_state_error'
1698 1698
1699 1699 LOCK_AUTOMATIC = 'lock_auto'
1700 1700 LOCK_API = 'lock_api'
1701 1701 LOCK_WEB = 'lock_web'
1702 1702 LOCK_PULL = 'lock_pull'
1703 1703
1704 1704 NAME_SEP = URL_SEP
1705 1705
1706 1706 repo_id = Column(
1707 1707 "repo_id", Integer(), nullable=False, unique=True, default=None,
1708 1708 primary_key=True)
1709 1709 _repo_name = Column(
1710 1710 "repo_name", Text(), nullable=False, default=None)
1711 1711 repo_name_hash = Column(
1712 1712 "repo_name_hash", String(255), nullable=False, unique=True)
1713 1713 repo_state = Column("repo_state", String(255), nullable=True)
1714 1714
1715 1715 clone_uri = Column(
1716 1716 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1717 1717 default=None)
1718 1718 push_uri = Column(
1719 1719 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1720 1720 default=None)
1721 1721 repo_type = Column(
1722 1722 "repo_type", String(255), nullable=False, unique=False, default=None)
1723 1723 user_id = Column(
1724 1724 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1725 1725 unique=False, default=None)
1726 1726 private = Column(
1727 1727 "private", Boolean(), nullable=True, unique=None, default=None)
1728 1728 archived = Column(
1729 1729 "archived", Boolean(), nullable=True, unique=None, default=None)
1730 1730 enable_statistics = Column(
1731 1731 "statistics", Boolean(), nullable=True, unique=None, default=True)
1732 1732 enable_downloads = Column(
1733 1733 "downloads", Boolean(), nullable=True, unique=None, default=True)
1734 1734 description = Column(
1735 1735 "description", String(10000), nullable=True, unique=None, default=None)
1736 1736 created_on = Column(
1737 1737 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1738 1738 default=datetime.datetime.now)
1739 1739 updated_on = Column(
1740 1740 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1741 1741 default=datetime.datetime.now)
1742 1742 _landing_revision = Column(
1743 1743 "landing_revision", String(255), nullable=False, unique=False,
1744 1744 default=None)
1745 1745 enable_locking = Column(
1746 1746 "enable_locking", Boolean(), nullable=False, unique=None,
1747 1747 default=False)
1748 1748 _locked = Column(
1749 1749 "locked", String(255), nullable=True, unique=False, default=None)
1750 1750 _changeset_cache = Column(
1751 1751 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1752 1752
1753 1753 fork_id = Column(
1754 1754 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1755 1755 nullable=True, unique=False, default=None)
1756 1756 group_id = Column(
1757 1757 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1758 1758 unique=False, default=None)
1759 1759
1760 1760 user = relationship('User', lazy='joined')
1761 1761 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1762 1762 group = relationship('RepoGroup', lazy='joined')
1763 1763 repo_to_perm = relationship(
1764 1764 'UserRepoToPerm', cascade='all',
1765 1765 order_by='UserRepoToPerm.repo_to_perm_id')
1766 1766 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1767 1767 stats = relationship('Statistics', cascade='all', uselist=False)
1768 1768
1769 1769 followers = relationship(
1770 1770 'UserFollowing',
1771 1771 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1772 1772 cascade='all')
1773 1773 extra_fields = relationship(
1774 1774 'RepositoryField', cascade="all, delete-orphan")
1775 1775 logs = relationship('UserLog')
1776 1776 comments = relationship(
1777 1777 'ChangesetComment', cascade="all, delete-orphan")
1778 1778 pull_requests_source = relationship(
1779 1779 'PullRequest',
1780 1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1781 1781 cascade="all, delete-orphan")
1782 1782 pull_requests_target = relationship(
1783 1783 'PullRequest',
1784 1784 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1785 1785 cascade="all, delete-orphan")
1786 1786 ui = relationship('RepoRhodeCodeUi', cascade="all")
1787 1787 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1788 1788 integrations = relationship('Integration', cascade="all, delete-orphan")
1789 1789
1790 1790 scoped_tokens = relationship('UserApiKeys', cascade="all")
1791 1791
1792 1792 # no cascade, set NULL
1793 1793 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1794 1794
1795 1795 def __unicode__(self):
1796 1796 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1797 1797 safe_unicode(self.repo_name))
1798 1798
1799 1799 @hybrid_property
1800 1800 def description_safe(self):
1801 1801 from rhodecode.lib import helpers as h
1802 1802 return h.escape(self.description)
1803 1803
1804 1804 @hybrid_property
1805 1805 def landing_rev(self):
1806 1806 # should always return [rev_type, rev], e.g. ['branch', 'master']
1807 1807 if self._landing_revision:
1808 1808 _rev_info = self._landing_revision.split(':')
1809 1809 if len(_rev_info) < 2:
1810 1810 _rev_info.insert(0, 'rev')
1811 1811 return [_rev_info[0], _rev_info[1]]
1812 1812 return [None, None]
1813 1813
1814 1814 @property
1815 1815 def landing_ref_type(self):
1816 1816 return self.landing_rev[0]
1817 1817
1818 1818 @property
1819 1819 def landing_ref_name(self):
1820 1820 return self.landing_rev[1]
1821 1821
1822 1822 @landing_rev.setter
1823 1823 def landing_rev(self, val):
1824 1824 if ':' not in val:
1825 1825 raise ValueError('value must be delimited with `:` and consist '
1826 1826 'of <rev_type>:<rev>, got %s instead' % val)
1827 1827 self._landing_revision = val
1828 1828
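# Illustrative sketch (not part of the original module): the landing revision is
# stored as '<rev_type>:<rev>' and exposed as a two-element list; the repository
# object below is hypothetical.
#
#   some_repo.landing_rev = 'branch:master'
#   some_repo.landing_rev        # -> ['branch', 'master']
#   some_repo.landing_ref_type   # -> 'branch'
#   some_repo.landing_ref_name   # -> 'master'
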
1829 1829 @hybrid_property
1830 1830 def locked(self):
1831 1831 if self._locked:
1832 1832 user_id, timelocked, reason = self._locked.split(':')
1833 1833 lock_values = int(user_id), timelocked, reason
1834 1834 else:
1835 1835 lock_values = [None, None, None]
1836 1836 return lock_values
1837 1837
1838 1838 @locked.setter
1839 1839 def locked(self, val):
1840 1840 if val and isinstance(val, (list, tuple)):
1841 1841 self._locked = ':'.join(map(str, val))
1842 1842 else:
1843 1843 self._locked = None
1844 1844
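# Illustrative sketch (not part of the original module): the lock is stored as
# 'user_id:timestamp:reason' and read back as a 3-element value; the values
# below are hypothetical.
#
#   some_repo.locked = [2, time.time(), Repository.LOCK_API]
#   user_id, lock_time, reason = some_repo.locked
#   # -> (2, '<timestamp string>', 'lock_api'); [None, None, None] when unlocked
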
1845 1845 @classmethod
1846 1846 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1847 1847 from rhodecode.lib.vcs.backends.base import EmptyCommit
1848 1848 dummy = EmptyCommit().__json__()
1849 1849 if not changeset_cache_raw:
1850 1850 dummy['source_repo_id'] = repo_id
1851 1851 return json.loads(json.dumps(dummy))
1852 1852
1853 1853 try:
1854 1854 return json.loads(changeset_cache_raw)
1855 1855 except TypeError:
1856 1856 return dummy
1857 1857 except Exception:
1858 1858 log.error(traceback.format_exc())
1859 1859 return dummy
1860 1860
1861 1861 @hybrid_property
1862 1862 def changeset_cache(self):
1863 1863 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1864 1864
1865 1865 @changeset_cache.setter
1866 1866 def changeset_cache(self, val):
1867 1867 try:
1868 1868 self._changeset_cache = json.dumps(val)
1869 1869 except Exception:
1870 1870 log.error(traceback.format_exc())
1871 1871
1872 1872 @hybrid_property
1873 1873 def repo_name(self):
1874 1874 return self._repo_name
1875 1875
1876 1876 @repo_name.setter
1877 1877 def repo_name(self, value):
1878 1878 self._repo_name = value
1879 1879 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1880 1880
1881 1881 @classmethod
1882 1882 def normalize_repo_name(cls, repo_name):
1883 1883 """
1884 1884 Normalizes an OS-specific repo_name to the format stored internally in the
1885 1885 database, using URL_SEP
1886 1886
1887 1887 :param cls:
1888 1888 :param repo_name:
1889 1889 """
1890 1890 return cls.NAME_SEP.join(repo_name.split(os.sep))
1891 1891
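# Illustrative sketch (not part of the original module), assuming URL_SEP is '/':
# on Windows, where os.sep is '\\', an OS-specific path is normalized to the
# URL form stored in the database.
#
#   Repository.normalize_repo_name('group\\subgroup\\repo')
#   # -> 'group/subgroup/repo'
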
1892 1892 @classmethod
1893 1893 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1894 1894 session = Session()
1895 1895 q = session.query(cls).filter(cls.repo_name == repo_name)
1896 1896
1897 1897 if cache:
1898 1898 if identity_cache:
1899 1899 val = cls.identity_cache(session, 'repo_name', repo_name)
1900 1900 if val:
1901 1901 return val
1902 1902 else:
1903 1903 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1904 1904 q = q.options(
1905 1905 FromCache("sql_cache_short", cache_key))
1906 1906
1907 1907 return q.scalar()
1908 1908
1909 1909 @classmethod
1910 1910 def get_by_id_or_repo_name(cls, repoid):
1911 1911 if isinstance(repoid, (int, long)):
1912 1912 try:
1913 1913 repo = cls.get(repoid)
1914 1914 except ValueError:
1915 1915 repo = None
1916 1916 else:
1917 1917 repo = cls.get_by_repo_name(repoid)
1918 1918 return repo
1919 1919
1920 1920 @classmethod
1921 1921 def get_by_full_path(cls, repo_full_path):
1922 1922 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1923 1923 repo_name = cls.normalize_repo_name(repo_name)
1924 1924 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1925 1925
1926 1926 @classmethod
1927 1927 def get_repo_forks(cls, repo_id):
1928 1928 return cls.query().filter(Repository.fork_id == repo_id)
1929 1929
1930 1930 @classmethod
1931 1931 def base_path(cls):
1932 1932 """
1933 1933 Returns the base path where all repos are stored
1934 1934
1935 1935 :param cls:
1936 1936 """
1937 1937 q = Session().query(RhodeCodeUi)\
1938 1938 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1939 1939 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1940 1940 return q.one().ui_value
1941 1941
1942 1942 @classmethod
1943 1943 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1944 1944 case_insensitive=True, archived=False):
1945 1945 q = Repository.query()
1946 1946
1947 1947 if not archived:
1948 1948 q = q.filter(Repository.archived.isnot(true()))
1949 1949
1950 1950 if not isinstance(user_id, Optional):
1951 1951 q = q.filter(Repository.user_id == user_id)
1952 1952
1953 1953 if not isinstance(group_id, Optional):
1954 1954 q = q.filter(Repository.group_id == group_id)
1955 1955
1956 1956 if case_insensitive:
1957 1957 q = q.order_by(func.lower(Repository.repo_name))
1958 1958 else:
1959 1959 q = q.order_by(Repository.repo_name)
1960 1960
1961 1961 return q.all()
1962 1962
1963 1963 @property
1964 1964 def repo_uid(self):
1965 1965 return '_{}'.format(self.repo_id)
1966 1966
1967 1967 @property
1968 1968 def forks(self):
1969 1969 """
1970 1970 Return forks of this repo
1971 1971 """
1972 1972 return Repository.get_repo_forks(self.repo_id)
1973 1973
1974 1974 @property
1975 1975 def parent(self):
1976 1976 """
1977 1977 Returns fork parent
1978 1978 """
1979 1979 return self.fork
1980 1980
1981 1981 @property
1982 1982 def just_name(self):
1983 1983 return self.repo_name.split(self.NAME_SEP)[-1]
1984 1984
1985 1985 @property
1986 1986 def groups_with_parents(self):
1987 1987 groups = []
1988 1988 if self.group is None:
1989 1989 return groups
1990 1990
1991 1991 cur_gr = self.group
1992 1992 groups.insert(0, cur_gr)
1993 1993 while 1:
1994 1994 gr = getattr(cur_gr, 'parent_group', None)
1995 1995 cur_gr = cur_gr.parent_group
1996 1996 if gr is None:
1997 1997 break
1998 1998 groups.insert(0, gr)
1999 1999
2000 2000 return groups
2001 2001
2002 2002 @property
2003 2003 def groups_and_repo(self):
2004 2004 return self.groups_with_parents, self
2005 2005
2006 2006 @LazyProperty
2007 2007 def repo_path(self):
2008 2008 """
2009 2009 Returns base full path for that repository means where it actually
2010 2010 exists on a filesystem
2011 2011 """
2012 2012 q = Session().query(RhodeCodeUi).filter(
2013 2013 RhodeCodeUi.ui_key == self.NAME_SEP)
2014 2014 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2015 2015 return q.one().ui_value
2016 2016
2017 2017 @property
2018 2018 def repo_full_path(self):
2019 2019 p = [self.repo_path]
2020 2020 # we need to split the name by / since this is how we store the
2021 2021 # names in the database, but that eventually needs to be converted
2022 2022 # into a valid system path
2023 2023 p += self.repo_name.split(self.NAME_SEP)
2024 2024 return os.path.join(*map(safe_unicode, p))
2025 2025
2026 2026 @property
2027 2027 def cache_keys(self):
2028 2028 """
2029 2029 Returns associated cache keys for that repo
2030 2030 """
2031 2031 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2032 2032 repo_id=self.repo_id)
2033 2033 return CacheKey.query()\
2034 2034 .filter(CacheKey.cache_args == invalidation_namespace)\
2035 2035 .order_by(CacheKey.cache_key)\
2036 2036 .all()
2037 2037
2038 2038 @property
2039 2039 def cached_diffs_relative_dir(self):
2040 2040 """
2041 2041 Return the path of cached diffs relative to the repository store,
2042 2042 used for safe display to users, who shouldn't know the absolute store
2043 2043 path
2044 2044 """
2045 2045 return os.path.join(
2046 2046 os.path.dirname(self.repo_name),
2047 2047 self.cached_diffs_dir.split(os.path.sep)[-1])
2048 2048
2049 2049 @property
2050 2050 def cached_diffs_dir(self):
2051 2051 path = self.repo_full_path
2052 2052 return os.path.join(
2053 2053 os.path.dirname(path),
2054 2054 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2055 2055
2056 2056 def cached_diffs(self):
2057 2057 diff_cache_dir = self.cached_diffs_dir
2058 2058 if os.path.isdir(diff_cache_dir):
2059 2059 return os.listdir(diff_cache_dir)
2060 2060 return []
2061 2061
2062 2062 def shadow_repos(self):
2063 2063 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2064 2064 return [
2065 2065 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2066 2066 if x.startswith(shadow_repos_pattern)]
2067 2067
2068 2068 def get_new_name(self, repo_name):
2069 2069 """
2070 2070 Returns the new full repository name based on the assigned group and the new name
2071 2071 
2072 2072 :param repo_name:
2073 2073 """
2074 2074 path_prefix = self.group.full_path_splitted if self.group else []
2075 2075 return self.NAME_SEP.join(path_prefix + [repo_name])
2076 2076
2077 2077 @property
2078 2078 def _config(self):
2079 2079 """
2080 2080 Returns db based config object.
2081 2081 """
2082 2082 from rhodecode.lib.utils import make_db_config
2083 2083 return make_db_config(clear_session=False, repo=self)
2084 2084
2085 2085 def permissions(self, with_admins=True, with_owner=True,
2086 2086 expand_from_user_groups=False):
2087 2087 """
2088 2088 Permissions for repositories
2089 2089 """
2090 2090 _admin_perm = 'repository.admin'
2091 2091
2092 2092 owner_row = []
2093 2093 if with_owner:
2094 2094 usr = AttributeDict(self.user.get_dict())
2095 2095 usr.owner_row = True
2096 2096 usr.permission = _admin_perm
2097 2097 usr.permission_id = None
2098 2098 owner_row.append(usr)
2099 2099
2100 2100 super_admin_ids = []
2101 2101 super_admin_rows = []
2102 2102 if with_admins:
2103 2103 for usr in User.get_all_super_admins():
2104 2104 super_admin_ids.append(usr.user_id)
2105 2105 # if this admin is also owner, don't double the record
2106 2106 if usr.user_id == owner_row[0].user_id:
2107 2107 owner_row[0].admin_row = True
2108 2108 else:
2109 2109 usr = AttributeDict(usr.get_dict())
2110 2110 usr.admin_row = True
2111 2111 usr.permission = _admin_perm
2112 2112 usr.permission_id = None
2113 2113 super_admin_rows.append(usr)
2114 2114
2115 2115 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2116 2116 q = q.options(joinedload(UserRepoToPerm.repository),
2117 2117 joinedload(UserRepoToPerm.user),
2118 2118 joinedload(UserRepoToPerm.permission),)
2119 2119
2120 2120 # get owners, admins and their permissions. We re-write the sqlalchemy
2121 2121 # objects into plain AttributeDict copies because the sqlalchemy session
2122 2122 # holds a global reference, and changing one object would propagate to
2123 2123 # all others. If an admin is also the owner, setting admin_row would
2124 2124 # otherwise propagate to both objects
2125 2125 perm_rows = []
2126 2126 for _usr in q.all():
2127 2127 usr = AttributeDict(_usr.user.get_dict())
2128 2128 # if this user is also owner/admin, mark as duplicate record
2129 2129 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2130 2130 usr.duplicate_perm = True
2131 2131 # also check if this permission is maybe used by branch_permissions
2132 2132 if _usr.branch_perm_entry:
2133 2133 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2134 2134
2135 2135 usr.permission = _usr.permission.permission_name
2136 2136 usr.permission_id = _usr.repo_to_perm_id
2137 2137 perm_rows.append(usr)
2138 2138
2139 2139 # order the perm rows with the 'default' user first, then by
2140 2140 # admin, write, read, none permissions, sorted alphabetically within
2141 2141 # each group
2142 2142 perm_rows = sorted(perm_rows, key=display_user_sort)
2143 2143
2144 2144 user_groups_rows = []
2145 2145 if expand_from_user_groups:
2146 2146 for ug in self.permission_user_groups(with_members=True):
2147 2147 for user_data in ug.members:
2148 2148 user_groups_rows.append(user_data)
2149 2149
2150 2150 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2151 2151
2152 2152 def permission_user_groups(self, with_members=True):
2153 2153 q = UserGroupRepoToPerm.query()\
2154 2154 .filter(UserGroupRepoToPerm.repository == self)
2155 2155 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2156 2156 joinedload(UserGroupRepoToPerm.users_group),
2157 2157 joinedload(UserGroupRepoToPerm.permission),)
2158 2158
2159 2159 perm_rows = []
2160 2160 for _user_group in q.all():
2161 2161 entry = AttributeDict(_user_group.users_group.get_dict())
2162 2162 entry.permission = _user_group.permission.permission_name
2163 2163 if with_members:
2164 2164 entry.members = [x.user.get_dict()
2165 2165 for x in _user_group.users_group.members]
2166 2166 perm_rows.append(entry)
2167 2167
2168 2168 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2169 2169 return perm_rows
2170 2170
2171 2171 def get_api_data(self, include_secrets=False):
2172 2172 """
2173 2173 Common function for generating repo api data
2174 2174
2175 2175 :param include_secrets: See :meth:`User.get_api_data`.
2176 2176
2177 2177 """
2178 2178 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2179 2179 # move this methods on models level.
2180 2180 from rhodecode.model.settings import SettingsModel
2181 2181 from rhodecode.model.repo import RepoModel
2182 2182
2183 2183 repo = self
2184 2184 _user_id, _time, _reason = self.locked
2185 2185
2186 2186 data = {
2187 2187 'repo_id': repo.repo_id,
2188 2188 'repo_name': repo.repo_name,
2189 2189 'repo_type': repo.repo_type,
2190 2190 'clone_uri': repo.clone_uri or '',
2191 2191 'push_uri': repo.push_uri or '',
2192 2192 'url': RepoModel().get_url(self),
2193 2193 'private': repo.private,
2194 2194 'created_on': repo.created_on,
2195 2195 'description': repo.description_safe,
2196 2196 'landing_rev': repo.landing_rev,
2197 2197 'owner': repo.user.username,
2198 2198 'fork_of': repo.fork.repo_name if repo.fork else None,
2199 2199 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2200 2200 'enable_statistics': repo.enable_statistics,
2201 2201 'enable_locking': repo.enable_locking,
2202 2202 'enable_downloads': repo.enable_downloads,
2203 2203 'last_changeset': repo.changeset_cache,
2204 2204 'locked_by': User.get(_user_id).get_api_data(
2205 2205 include_secrets=include_secrets) if _user_id else None,
2206 2206 'locked_date': time_to_datetime(_time) if _time else None,
2207 2207 'lock_reason': _reason if _reason else None,
2208 2208 }
2209 2209
2210 2210 # TODO: mikhail: should be per-repo settings here
2211 2211 rc_config = SettingsModel().get_all_settings()
2212 2212 repository_fields = str2bool(
2213 2213 rc_config.get('rhodecode_repository_fields'))
2214 2214 if repository_fields:
2215 2215 for f in self.extra_fields:
2216 2216 data[f.field_key_prefixed] = f.field_value
2217 2217
2218 2218 return data
2219 2219
2220 2220 @classmethod
2221 2221 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2222 2222 if not lock_time:
2223 2223 lock_time = time.time()
2224 2224 if not lock_reason:
2225 2225 lock_reason = cls.LOCK_AUTOMATIC
2226 2226 repo.locked = [user_id, lock_time, lock_reason]
2227 2227 Session().add(repo)
2228 2228 Session().commit()
2229 2229
2230 2230 @classmethod
2231 2231 def unlock(cls, repo):
2232 2232 repo.locked = None
2233 2233 Session().add(repo)
2234 2234 Session().commit()
2235 2235
2236 2236 @classmethod
2237 2237 def getlock(cls, repo):
2238 2238 return repo.locked
2239 2239
2240 2240 def is_user_lock(self, user_id):
2241 2241 if self.locked[0]:
2242 2242 lock_user_id = safe_int(self.locked[0])
2243 2243 user_id = safe_int(user_id)
2244 2244 # both are ints, and they are equal
2245 2245 return all([lock_user_id, user_id]) and lock_user_id == user_id
2246 2246
2247 2247 return False
2248 2248
2249 2249 def get_locking_state(self, action, user_id, only_when_enabled=True):
2250 2250 """
2251 2251 Checks locking on this repository. If locking is enabled and a lock is
2252 2252 present, returns a tuple of make_lock, locked, locked_by.
2253 2253 make_lock can have 3 states: None (do nothing), True (make a lock),
2254 2254 False (release the lock). This value is later propagated to the hooks,
2255 2255 which do the actual locking. Think of it as a signal telling the hooks what to do.
2256 2256
2257 2257 """
2258 2258 # TODO: johbo: This is part of the business logic and should be moved
2259 2259 # into the RepositoryModel.
2260 2260
2261 2261 if action not in ('push', 'pull'):
2262 2262 raise ValueError("Invalid action value: %s" % repr(action))
2263 2263
2264 2264 # defines if locked error should be thrown to user
2265 2265 currently_locked = False
2266 2266 # defines if new lock should be made, tri-state
2267 2267 make_lock = None
2268 2268 repo = self
2269 2269 user = User.get(user_id)
2270 2270
2271 2271 lock_info = repo.locked
2272 2272
2273 2273 if repo and (repo.enable_locking or not only_when_enabled):
2274 2274 if action == 'push':
2275 2275 # check if it's already locked !, if it is compare users
2276 2276 locked_by_user_id = lock_info[0]
2277 2277 if user.user_id == locked_by_user_id:
2278 2278 log.debug(
2279 2279 'Got `push` action from user %s, now unlocking', user)
2280 2280 # unlock if we have push from user who locked
2281 2281 make_lock = False
2282 2282 else:
2283 2283 # we're not the same user who locked, ban with
2284 2284 # code defined in settings (default is 423 HTTP Locked) !
2285 2285 log.debug('Repo %s is currently locked by %s', repo, user)
2286 2286 currently_locked = True
2287 2287 elif action == 'pull':
2288 2288 # [0] user [1] date
2289 2289 if lock_info[0] and lock_info[1]:
2290 2290 log.debug('Repo %s is currently locked by %s', repo, user)
2291 2291 currently_locked = True
2292 2292 else:
2293 2293 log.debug('Setting lock on repo %s by %s', repo, user)
2294 2294 make_lock = True
2295 2295
2296 2296 else:
2297 2297 log.debug('Repository %s does not have locking enabled', repo)
2298 2298
2299 2299 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2300 2300 make_lock, currently_locked, lock_info)
2301 2301
2302 2302 from rhodecode.lib.auth import HasRepoPermissionAny
2303 2303 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2304 2304 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2305 2305 # if we don't have at least write permission we cannot make a lock
2306 2306 log.debug('lock state reset back to FALSE due to lack '
2307 2307 'of at least write permission')
2308 2308 make_lock = False
2309 2309
2310 2310 return make_lock, currently_locked, lock_info
2311 2311
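# Illustrative usage sketch (not part of the original module): how a push hook
# might interpret the tri-state result; the repo and user id are hypothetical.
#
#   make_lock, locked, locked_by = some_repo.get_locking_state('push', some_user_id)
#   if locked:
#       pass  # reject with the configured "locked" HTTP code (default 423)
#   elif make_lock:
#       pass  # the hook should acquire the lock for this user
#   elif make_lock is False:
#       pass  # the hook should release the existing lock
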
2312 2312 @property
2313 2313 def last_commit_cache_update_diff(self):
2314 2314 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2315 2315
2316 2316 @classmethod
2317 2317 def _load_commit_change(cls, last_commit_cache):
2318 2318 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2319 2319 empty_date = datetime.datetime.fromtimestamp(0)
2320 2320 date_latest = last_commit_cache.get('date', empty_date)
2321 2321 try:
2322 2322 return parse_datetime(date_latest)
2323 2323 except Exception:
2324 2324 return empty_date
2325 2325
2326 2326 @property
2327 2327 def last_commit_change(self):
2328 2328 return self._load_commit_change(self.changeset_cache)
2329 2329
2330 2330 @property
2331 2331 def last_db_change(self):
2332 2332 return self.updated_on
2333 2333
2334 2334 @property
2335 2335 def clone_uri_hidden(self):
2336 2336 clone_uri = self.clone_uri
2337 2337 if clone_uri:
2338 2338 import urlobject
2339 2339 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2340 2340 if url_obj.password:
2341 2341 clone_uri = url_obj.with_password('*****')
2342 2342 return clone_uri
2343 2343
2344 2344 @property
2345 2345 def push_uri_hidden(self):
2346 2346 push_uri = self.push_uri
2347 2347 if push_uri:
2348 2348 import urlobject
2349 2349 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2350 2350 if url_obj.password:
2351 2351 push_uri = url_obj.with_password('*****')
2352 2352 return push_uri
2353 2353
2354 2354 def clone_url(self, **override):
2355 2355 from rhodecode.model.settings import SettingsModel
2356 2356
2357 2357 uri_tmpl = None
2358 2358 if 'with_id' in override:
2359 2359 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2360 2360 del override['with_id']
2361 2361
2362 2362 if 'uri_tmpl' in override:
2363 2363 uri_tmpl = override['uri_tmpl']
2364 2364 del override['uri_tmpl']
2365 2365
2366 2366 ssh = False
2367 2367 if 'ssh' in override:
2368 2368 ssh = True
2369 2369 del override['ssh']
2370 2370
2371 2371 # we didn't override our tmpl from **overrides
2372 2372 request = get_current_request()
2373 2373 if not uri_tmpl:
2374 2374 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2375 2375 rc_config = request.call_context.rc_config
2376 2376 else:
2377 2377 rc_config = SettingsModel().get_all_settings(cache=True)
2378 2378
2379 2379 if ssh:
2380 2380 uri_tmpl = rc_config.get(
2381 2381 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2382 2382
2383 2383 else:
2384 2384 uri_tmpl = rc_config.get(
2385 2385 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2386 2386
2387 2387 return get_clone_url(request=request,
2388 2388 uri_tmpl=uri_tmpl,
2389 2389 repo_name=self.repo_name,
2390 2390 repo_id=self.repo_id,
2391 2391 repo_type=self.repo_type,
2392 2392 **override)
2393 2393
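# Illustrative sketch (not part of the original module): the override keywords
# handled above; the repository object and the explicit template are hypothetical.
#
#   some_repo.clone_url()              # uses rhodecode_clone_uri_tmpl (or the default)
#   some_repo.clone_url(with_id=True)  # uses DEFAULT_CLONE_URI_ID (the _<repoid> form)
#   some_repo.clone_url(ssh=True)      # uses rhodecode_clone_uri_ssh_tmpl (or the SSH default)
#   some_repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # explicit template
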
2394 2394 def set_state(self, state):
2395 2395 self.repo_state = state
2396 2396 Session().add(self)
2397 2397 #==========================================================================
2398 2398 # SCM PROPERTIES
2399 2399 #==========================================================================
2400 2400
2401 2401 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2402 2402 return get_commit_safe(
2403 2403 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2404 2404 maybe_unreachable=maybe_unreachable)
2405 2405
2406 2406 def get_changeset(self, rev=None, pre_load=None):
2407 2407 warnings.warn("Use get_commit", DeprecationWarning)
2408 2408 commit_id = None
2409 2409 commit_idx = None
2410 2410 if isinstance(rev, compat.string_types):
2411 2411 commit_id = rev
2412 2412 else:
2413 2413 commit_idx = rev
2414 2414 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2415 2415 pre_load=pre_load)
2416 2416
2417 2417 def get_landing_commit(self):
2418 2418 """
2419 2419 Returns landing commit, or if that doesn't exist returns the tip
2420 2420 """
2421 2421 _rev_type, _rev = self.landing_rev
2422 2422 commit = self.get_commit(_rev)
2423 2423 if isinstance(commit, EmptyCommit):
2424 2424 return self.get_commit()
2425 2425 return commit
2426 2426
2427 2427 def flush_commit_cache(self):
2428 2428 self.update_commit_cache(cs_cache={'raw_id':'0'})
2429 2429 self.update_commit_cache()
2430 2430
2431 2431 def update_commit_cache(self, cs_cache=None, config=None):
2432 2432 """
2433 2433 Update cache of last commit for repository
2434 2434 cache_keys should be::
2435 2435
2436 2436 source_repo_id
2437 2437 short_id
2438 2438 raw_id
2439 2439 revision
2440 2440 parents
2441 2441 message
2442 2442 date
2443 2443 author
2444 2444 updated_on
2445 2445
2446 2446 """
2447 2447 from rhodecode.lib.vcs.backends.base import BaseChangeset
2448 2448 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2449 2449 empty_date = datetime.datetime.fromtimestamp(0)
2450 2450
2451 2451 if cs_cache is None:
2452 2452 # use no-cache version here
2453 2453 try:
2454 2454 scm_repo = self.scm_instance(cache=False, config=config)
2455 2455 except VCSError:
2456 2456 scm_repo = None
2457 2457 empty = scm_repo is None or scm_repo.is_empty()
2458 2458
2459 2459 if not empty:
2460 2460 cs_cache = scm_repo.get_commit(
2461 2461 pre_load=["author", "date", "message", "parents", "branch"])
2462 2462 else:
2463 2463 cs_cache = EmptyCommit()
2464 2464
2465 2465 if isinstance(cs_cache, BaseChangeset):
2466 2466 cs_cache = cs_cache.__json__()
2467 2467
2468 2468 def is_outdated(new_cs_cache):
2469 2469 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2470 2470 new_cs_cache['revision'] != self.changeset_cache['revision']):
2471 2471 return True
2472 2472 return False
2473 2473
2474 2474 # check if we have maybe already latest cached revision
2475 2475 if is_outdated(cs_cache) or not self.changeset_cache:
2476 2476 _current_datetime = datetime.datetime.utcnow()
2477 2477 last_change = cs_cache.get('date') or _current_datetime
2478 2478 # we check if last update is newer than the new value
2479 2479 # if yes, we use the current timestamp instead. Imagine you get
2480 2480 # an old commit pushed 1y ago, we'd set the last update to 1y ago.
2481 2481 last_change_timestamp = datetime_to_time(last_change)
2482 2482 current_timestamp = datetime_to_time(_current_datetime)
2483 2483 if last_change_timestamp > current_timestamp and not empty:
2484 2484 cs_cache['date'] = _current_datetime
2485 2485
2486 2486 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2487 2487 cs_cache['updated_on'] = time.time()
2488 2488 self.changeset_cache = cs_cache
2489 2489 self.updated_on = last_change
2490 2490 Session().add(self)
2491 2491 Session().commit()
2492 2492
2493 2493 else:
2494 2494 if empty:
2495 2495 cs_cache = EmptyCommit().__json__()
2496 2496 else:
2497 2497 cs_cache = self.changeset_cache
2498 2498
2499 2499 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2500 2500
2501 2501 cs_cache['updated_on'] = time.time()
2502 2502 self.changeset_cache = cs_cache
2503 2503 self.updated_on = _date_latest
2504 2504 Session().add(self)
2505 2505 Session().commit()
2506 2506
2507 2507 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2508 2508 self.repo_name, cs_cache, _date_latest)
2509 2509
2510 2510 @property
2511 2511 def tip(self):
2512 2512 return self.get_commit('tip')
2513 2513
2514 2514 @property
2515 2515 def author(self):
2516 2516 return self.tip.author
2517 2517
2518 2518 @property
2519 2519 def last_change(self):
2520 2520 return self.scm_instance().last_change
2521 2521
2522 2522 def get_comments(self, revisions=None):
2523 2523 """
2524 2524 Returns comments for this repository grouped by revisions
2525 2525
2526 2526 :param revisions: filter query by revisions only
2527 2527 """
2528 2528 cmts = ChangesetComment.query()\
2529 2529 .filter(ChangesetComment.repo == self)
2530 2530 if revisions:
2531 2531 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2532 2532 grouped = collections.defaultdict(list)
2533 2533 for cmt in cmts.all():
2534 2534 grouped[cmt.revision].append(cmt)
2535 2535 return grouped
2536 2536
2537 2537 def statuses(self, revisions=None):
2538 2538 """
2539 2539 Returns statuses for this repository
2540 2540
2541 2541 :param revisions: list of revisions to get statuses for
2542 2542 """
2543 2543 statuses = ChangesetStatus.query()\
2544 2544 .filter(ChangesetStatus.repo == self)\
2545 2545 .filter(ChangesetStatus.version == 0)
2546 2546
2547 2547 if revisions:
2548 2548 # Try doing the filtering in chunks to avoid hitting limits
2549 2549 size = 500
2550 2550 status_results = []
2551 2551 for chunk in xrange(0, len(revisions), size):
2552 2552 status_results += statuses.filter(
2553 2553 ChangesetStatus.revision.in_(
2554 2554 revisions[chunk: chunk+size])
2555 2555 ).all()
2556 2556 else:
2557 2557 status_results = statuses.all()
2558 2558
2559 2559 grouped = {}
2560 2560
2561 2561 # maybe we have an open pull request without a status yet?
2562 2562 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2563 2563 status_lbl = ChangesetStatus.get_status_lbl(stat)
2564 2564 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2565 2565 for rev in pr.revisions:
2566 2566 pr_id = pr.pull_request_id
2567 2567 pr_repo = pr.target_repo.repo_name
2568 2568 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2569 2569
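# explicit statuses override the synthetic 'under review' entries set above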
2570 2570 for stat in status_results:
2571 2571 pr_id = pr_repo = None
2572 2572 if stat.pull_request:
2573 2573 pr_id = stat.pull_request.pull_request_id
2574 2574 pr_repo = stat.pull_request.target_repo.repo_name
2575 2575 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2576 2576 pr_id, pr_repo]
2577 2577 return grouped
2578 2578
2579 2579 # ==========================================================================
2580 2580 # SCM CACHE INSTANCE
2581 2581 # ==========================================================================
2582 2582
2583 2583 def scm_instance(self, **kwargs):
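# Returns a vcs backend instance for this repository. With no explicit
# `cache`/`config` arguments and the global `vcs_full_cache` setting enabled,
# a long-term cached instance is returned, otherwise a fresh one is built.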
2584 2584 import rhodecode
2585 2585
2586 2586 # Passing a config will not hit the cache; currently this is only
2587 2587 # used by repo2dbmapper
2588 2588 config = kwargs.pop('config', None)
2589 2589 cache = kwargs.pop('cache', None)
2590 2590 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2591 2591 if vcs_full_cache is not None:
2592 2592 # allows override global config
2593 2593 full_cache = vcs_full_cache
2594 2594 else:
2595 2595 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2596 2596 # if cache is NOT defined use the global default, else we have full
2597 2597 # control over cache behaviour
2598 2598 if cache is None and full_cache and not config:
2599 2599 log.debug('Initializing pure cached instance for %s', self.repo_path)
2600 2600 return self._get_instance_cached()
2601 2601
2602 2602 # cache here is sent to the "vcs server"
2603 2603 return self._get_instance(cache=bool(cache), config=config)
2604 2604
2605 2605 def _get_instance_cached(self):
2606 2606 from rhodecode.lib import rc_cache
2607 2607
2608 2608 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2609 2609 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2610 2610 repo_id=self.repo_id)
2611 2611 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2612 2612
2613 2613 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2614 2614 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2615 2615 return self._get_instance(repo_state_uid=_cache_state_uid)
2616 2616
2617 2617 # we must use a thread-scoped cache here,
2618 2618 # because each gevent thread needs its own, non-shared connection and cache
2619 2619 # we also alter `args` so the cache key is individual for every green thread.
2620 2620 inv_context_manager = rc_cache.InvalidationContext(
2621 2621 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2622 2622 thread_scoped=True)
2623 2623 with inv_context_manager as invalidation_context:
2624 2624 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2625 2625 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2626 2626
2627 2627 # re-compute and store cache if we get invalidate signal
2628 2628 if invalidation_context.should_invalidate():
2629 2629 instance = get_instance_cached.refresh(*args)
2630 2630 else:
2631 2631 instance = get_instance_cached(*args)
2632 2632
2633 2633 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2634 2634 return instance
2635 2635
2636 2636 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2637 2637 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2638 2638 self.repo_type, self.repo_path, cache)
2639 2639 config = config or self._config
2640 2640 custom_wire = {
2641 2641 'cache': cache, # controls the vcs.remote cache
2642 2642 'repo_state_uid': repo_state_uid
2643 2643 }
2644 2644 repo = get_vcs_instance(
2645 2645 repo_path=safe_str(self.repo_full_path),
2646 2646 config=config,
2647 2647 with_wire=custom_wire,
2648 2648 create=False,
2649 2649 _vcs_alias=self.repo_type)
2650 2650 if repo is not None:
2651 2651 repo.count() # cache rebuild
2652 2652 return repo
2653 2653
2654 2654 def get_shadow_repository_path(self, workspace_id):
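# shadow repositories are temporary working copies kept next to the real
# repository (used e.g. for pull-request merge operations); the path is
# derived from the repo path, repo id and the given workspace id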
2655 2655 from rhodecode.lib.vcs.backends.base import BaseRepository
2656 2656 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2657 2657 self.repo_full_path, self.repo_id, workspace_id)
2658 2658 return shadow_repo_path
2659 2659
2660 2660 def __json__(self):
2661 2661 return {'landing_rev': self.landing_rev}
2662 2662
2663 2663 def get_dict(self):
2664 2664
2665 2665 # Since we transformed `repo_name` to a hybrid property, we need to
2666 2666 # keep compatibility with the code which uses `repo_name` field.
2667 2667
2668 2668 result = super(Repository, self).get_dict()
2669 2669 result['repo_name'] = result.pop('_repo_name', None)
2670 2670 return result
2671 2671
2672 2672
2673 2673 class RepoGroup(Base, BaseModel):
2674 2674 __tablename__ = 'groups'
2675 2675 __table_args__ = (
2676 2676 UniqueConstraint('group_name', 'group_parent_id'),
2677 2677 base_table_args,
2678 2678 )
2679 2679 __mapper_args__ = {'order_by': 'group_name'}
2680 2680
2681 2681 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2682 2682
2683 2683 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2684 2684 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2685 2685 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2686 2686 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2687 2687 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2688 2688 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2689 2689 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2690 2690 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2691 2691 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2692 2692 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2693 2693 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2694 2694
2695 2695 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2696 2696 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2697 2697 parent_group = relationship('RepoGroup', remote_side=group_id)
2698 2698 user = relationship('User')
2699 2699 integrations = relationship('Integration', cascade="all, delete-orphan")
2700 2700
2701 2701 # no cascade, set NULL
2702 2702 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2703 2703
2704 2704 def __init__(self, group_name='', parent_group=None):
2705 2705 self.group_name = group_name
2706 2706 self.parent_group = parent_group
2707 2707
2708 2708 def __unicode__(self):
2709 2709 return u"<%s('id:%s:%s')>" % (
2710 2710 self.__class__.__name__, self.group_id, self.group_name)
2711 2711
2712 2712 @hybrid_property
2713 2713 def group_name(self):
2714 2714 return self._group_name
2715 2715
2716 2716 @group_name.setter
2717 2717 def group_name(self, value):
2718 2718 self._group_name = value
2719 2719 self.group_name_hash = self.hash_repo_group_name(value)
2720 2720
2721 2721 @classmethod
2722 2722 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
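# deserialize the JSON commit cache stored on the row; fall back to an
# EmptyCommit stub when the raw value is missing or cannot be parsed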
2723 2723 from rhodecode.lib.vcs.backends.base import EmptyCommit
2724 2724 dummy = EmptyCommit().__json__()
2725 2725 if not changeset_cache_raw:
2726 2726 dummy['source_repo_id'] = repo_id
2727 2727 return json.loads(json.dumps(dummy))
2728 2728
2729 2729 try:
2730 2730 return json.loads(changeset_cache_raw)
2731 2731 except TypeError:
2732 2732 return dummy
2733 2733 except Exception:
2734 2734 log.error(traceback.format_exc())
2735 2735 return dummy
2736 2736
2737 2737 @hybrid_property
2738 2738 def changeset_cache(self):
2739 2739 return self._load_changeset_cache('', self._changeset_cache)
2740 2740
2741 2741 @changeset_cache.setter
2742 2742 def changeset_cache(self, val):
2743 2743 try:
2744 2744 self._changeset_cache = json.dumps(val)
2745 2745 except Exception:
2746 2746 log.error(traceback.format_exc())
2747 2747
2748 2748 @validates('group_parent_id')
2749 2749 def validate_group_parent_id(self, key, val):
2750 2750 """
2751 2751 Check for cycle references from a parent group to itself
2752 2752 """
2753 2753 if self.group_id and val:
2754 2754 assert val != self.group_id
2755 2755
2756 2756 return val
2757 2757
2758 2758 @hybrid_property
2759 2759 def description_safe(self):
2760 2760 from rhodecode.lib import helpers as h
2761 2761 return h.escape(self.group_description)
2762 2762
2763 2763 @classmethod
2764 2764 def hash_repo_group_name(cls, repo_group_name):
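# build a flat ASCII representation of the group name: it is lowercased and
# every character outside a-z is replaced by its ordinal value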
2765 2765 val = remove_formatting(repo_group_name)
2766 2766 val = safe_str(val).lower()
2767 2767 chars = []
2768 2768 for c in val:
2769 2769 if c not in string.ascii_letters:
2770 2770 c = str(ord(c))
2771 2771 chars.append(c)
2772 2772
2773 2773 return ''.join(chars)
2774 2774
2775 2775 @classmethod
2776 2776 def _generate_choice(cls, repo_group):
2777 2777 from webhelpers2.html import literal as _literal
2778 2778 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2779 2779 return repo_group.group_id, _name(repo_group.full_path_splitted)
2780 2780
2781 2781 @classmethod
2782 2782 def groups_choices(cls, groups=None, show_empty_group=True):
2783 2783 if not groups:
2784 2784 groups = cls.query().all()
2785 2785
2786 2786 repo_groups = []
2787 2787 if show_empty_group:
2788 2788 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2789 2789
2790 2790 repo_groups.extend([cls._generate_choice(x) for x in groups])
2791 2791
2792 2792 repo_groups = sorted(
2793 2793 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2794 2794 return repo_groups
2795 2795
2796 2796 @classmethod
2797 2797 def url_sep(cls):
2798 2798 return URL_SEP
2799 2799
2800 2800 @classmethod
2801 2801 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2802 2802 if case_insensitive:
2803 2803 gr = cls.query().filter(func.lower(cls.group_name)
2804 2804 == func.lower(group_name))
2805 2805 else:
2806 2806 gr = cls.query().filter(cls.group_name == group_name)
2807 2807 if cache:
2808 2808 name_key = _hash_key(group_name)
2809 2809 gr = gr.options(
2810 2810 FromCache("sql_cache_short", "get_group_%s" % name_key))
2811 2811 return gr.scalar()
2812 2812
2813 2813 @classmethod
2814 2814 def get_user_personal_repo_group(cls, user_id):
2815 2815 user = User.get(user_id)
2816 2816 if user.username == User.DEFAULT_USER:
2817 2817 return None
2818 2818
2819 2819 return cls.query()\
2820 2820 .filter(cls.personal == true()) \
2821 2821 .filter(cls.user == user) \
2822 2822 .order_by(cls.group_id.asc()) \
2823 2823 .first()
2824 2824
2825 2825 @classmethod
2826 2826 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2827 2827 case_insensitive=True):
2828 2828 q = RepoGroup.query()
2829 2829
2830 2830 if not isinstance(user_id, Optional):
2831 2831 q = q.filter(RepoGroup.user_id == user_id)
2832 2832
2833 2833 if not isinstance(group_id, Optional):
2834 2834 q = q.filter(RepoGroup.group_parent_id == group_id)
2835 2835
2836 2836 if case_insensitive:
2837 2837 q = q.order_by(func.lower(RepoGroup.group_name))
2838 2838 else:
2839 2839 q = q.order_by(RepoGroup.group_name)
2840 2840 return q.all()
2841 2841
2842 2842 @property
2843 2843 def parents(self, parents_recursion_limit=10):
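# walk up the parent chain and return ancestors ordered from the top-most
# group down; the recursion limit guards against accidental cycles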
2844 2844 groups = []
2845 2845 if self.parent_group is None:
2846 2846 return groups
2847 2847 cur_gr = self.parent_group
2848 2848 groups.insert(0, cur_gr)
2849 2849 cnt = 0
2850 2850 while 1:
2851 2851 cnt += 1
2852 2852 gr = getattr(cur_gr, 'parent_group', None)
2853 2853 cur_gr = cur_gr.parent_group
2854 2854 if gr is None:
2855 2855 break
2856 2856 if cnt == parents_recursion_limit:
2857 2857 # this will prevent accidental infinite loops
2858 2858 log.error('more than %s parents found for group %s, stopping '
2859 2859 'recursive parent fetching', parents_recursion_limit, self)
2860 2860 break
2861 2861
2862 2862 groups.insert(0, gr)
2863 2863 return groups
2864 2864
2865 2865 @property
2866 2866 def last_commit_cache_update_diff(self):
2867 2867 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2868 2868
2869 2869 @classmethod
2870 2870 def _load_commit_change(cls, last_commit_cache):
2871 2871 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2872 2872 empty_date = datetime.datetime.fromtimestamp(0)
2873 2873 date_latest = last_commit_cache.get('date', empty_date)
2874 2874 try:
2875 2875 return parse_datetime(date_latest)
2876 2876 except Exception:
2877 2877 return empty_date
2878 2878
2879 2879 @property
2880 2880 def last_commit_change(self):
2881 2881 return self._load_commit_change(self.changeset_cache)
2882 2882
2883 2883 @property
2884 2884 def last_db_change(self):
2885 2885 return self.updated_on
2886 2886
2887 2887 @property
2888 2888 def children(self):
2889 2889 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2890 2890
2891 2891 @property
2892 2892 def name(self):
2893 2893 return self.group_name.split(RepoGroup.url_sep())[-1]
2894 2894
2895 2895 @property
2896 2896 def full_path(self):
2897 2897 return self.group_name
2898 2898
2899 2899 @property
2900 2900 def full_path_splitted(self):
2901 2901 return self.group_name.split(RepoGroup.url_sep())
2902 2902
2903 2903 @property
2904 2904 def repositories(self):
2905 2905 return Repository.query()\
2906 2906 .filter(Repository.group == self)\
2907 2907 .order_by(Repository.repo_name)
2908 2908
2909 2909 @property
2910 2910 def repositories_recursive_count(self):
2911 2911 cnt = self.repositories.count()
2912 2912
2913 2913 def children_count(group):
2914 2914 cnt = 0
2915 2915 for child in group.children:
2916 2916 cnt += child.repositories.count()
2917 2917 cnt += children_count(child)
2918 2918 return cnt
2919 2919
2920 2920 return cnt + children_count(self)
2921 2921
2922 2922 def _recursive_objects(self, include_repos=True, include_groups=True):
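# depth-first walk over this group's subtree collecting repositories and/or
# nested groups; the group itself is prepended when groups are included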
2923 2923 all_ = []
2924 2924
2925 2925 def _get_members(root_gr):
2926 2926 if include_repos:
2927 2927 for r in root_gr.repositories:
2928 2928 all_.append(r)
2929 2929 childs = root_gr.children.all()
2930 2930 if childs:
2931 2931 for gr in childs:
2932 2932 if include_groups:
2933 2933 all_.append(gr)
2934 2934 _get_members(gr)
2935 2935
2936 2936 root_group = []
2937 2937 if include_groups:
2938 2938 root_group = [self]
2939 2939
2940 2940 _get_members(self)
2941 2941 return root_group + all_
2942 2942
2943 2943 def recursive_groups_and_repos(self):
2944 2944 """
2945 2945 Recursively returns all groups, with the repositories in those groups
2946 2946 """
2947 2947 return self._recursive_objects()
2948 2948
2949 2949 def recursive_groups(self):
2950 2950 """
2951 2951 Returns all child groups of this group, including children of children
2952 2952 """
2953 2953 return self._recursive_objects(include_repos=False)
2954 2954
2955 2955 def recursive_repos(self):
2956 2956 """
2957 2957 Returns all child repositories of this group
2958 2958 """
2959 2959 return self._recursive_objects(include_groups=False)
2960 2960
2961 2961 def get_new_name(self, group_name):
2962 2962 """
2963 2963 returns new full group name based on parent and new name
2964 2964
2965 2965 :param group_name:
2966 2966 """
2967 2967 path_prefix = (self.parent_group.full_path_splitted if
2968 2968 self.parent_group else [])
2969 2969 return RepoGroup.url_sep().join(path_prefix + [group_name])
2970 2970
2971 2971 def update_commit_cache(self, config=None):
2972 2972 """
2973 2973 Update cache of last commit for newest repository inside this repository group.
2974 2974 cache_keys should be::
2975 2975
2976 2976 source_repo_id
2977 2977 short_id
2978 2978 raw_id
2979 2979 revision
2980 2980 parents
2981 2981 message
2982 2982 date
2983 2983 author
2984 2984
2985 2985 """
2986 2986 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2987 2987 empty_date = datetime.datetime.fromtimestamp(0)
2988 2988
2989 2989 def repo_groups_and_repos(root_gr):
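# yields this group's direct repositories and its direct child groups;
# each child group exposes its own changeset_cache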
2990 2990 for _repo in root_gr.repositories:
2991 2991 yield _repo
2992 2992 for child_group in root_gr.children.all():
2993 2993 yield child_group
2994 2994
2995 2995 latest_repo_cs_cache = {}
2996 2996 for obj in repo_groups_and_repos(self):
2997 2997 repo_cs_cache = obj.changeset_cache
2998 2998 date_latest = latest_repo_cs_cache.get('date', empty_date)
2999 2999 date_current = repo_cs_cache.get('date', empty_date)
3000 3000 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3001 3001 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3002 3002 latest_repo_cs_cache = repo_cs_cache
3003 3003 if hasattr(obj, 'repo_id'):
3004 3004 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3005 3005 else:
3006 3006 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3007 3007
3008 3008 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3009 3009
3010 3010 latest_repo_cs_cache['updated_on'] = time.time()
3011 3011 self.changeset_cache = latest_repo_cs_cache
3012 3012 self.updated_on = _date_latest
3013 3013 Session().add(self)
3014 3014 Session().commit()
3015 3015
3016 3016 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3017 3017 self.group_name, latest_repo_cs_cache, _date_latest)
3018 3018
3019 3019 def permissions(self, with_admins=True, with_owner=True,
3020 3020 expand_from_user_groups=False):
3021 3021 """
3022 3022 Permissions for repository groups
3023 3023 """
3024 3024 _admin_perm = 'group.admin'
3025 3025
3026 3026 owner_row = []
3027 3027 if with_owner:
3028 3028 usr = AttributeDict(self.user.get_dict())
3029 3029 usr.owner_row = True
3030 3030 usr.permission = _admin_perm
3031 3031 owner_row.append(usr)
3032 3032
3033 3033 super_admin_ids = []
3034 3034 super_admin_rows = []
3035 3035 if with_admins:
3036 3036 for usr in User.get_all_super_admins():
3037 3037 super_admin_ids.append(usr.user_id)
3038 3038 # if this admin is also owner, don't double the record
3039 3039 if usr.user_id == owner_row[0].user_id:
3040 3040 owner_row[0].admin_row = True
3041 3041 else:
3042 3042 usr = AttributeDict(usr.get_dict())
3043 3043 usr.admin_row = True
3044 3044 usr.permission = _admin_perm
3045 3045 super_admin_rows.append(usr)
3046 3046
3047 3047 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3048 3048 q = q.options(joinedload(UserRepoGroupToPerm.group),
3049 3049 joinedload(UserRepoGroupToPerm.user),
3050 3050 joinedload(UserRepoGroupToPerm.permission),)
3051 3051
3052 3052 # get owners, admins and permissions. We do a trick of re-writing
3053 3053 # objects from sqlalchemy to named-tuples because the sqlalchemy session
3054 3054 # holds a global reference, and changing one object propagates to all
3055 3055 # others. This means that if an admin is also an owner, an admin_row change
3056 3056 # would propagate to both objects
3057 3057 perm_rows = []
3058 3058 for _usr in q.all():
3059 3059 usr = AttributeDict(_usr.user.get_dict())
3060 3060 # if this user is also owner/admin, mark as duplicate record
3061 3061 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3062 3062 usr.duplicate_perm = True
3063 3063 usr.permission = _usr.permission.permission_name
3064 3064 perm_rows.append(usr)
3065 3065
3066 3066 # filter the perm rows by 'default' first and then sort them by
3067 3067 # admin,write,read,none permissions sorted again alphabetically in
3068 3068 # each group
3069 3069 perm_rows = sorted(perm_rows, key=display_user_sort)
3070 3070
3071 3071 user_groups_rows = []
3072 3072 if expand_from_user_groups:
3073 3073 for ug in self.permission_user_groups(with_members=True):
3074 3074 for user_data in ug.members:
3075 3075 user_groups_rows.append(user_data)
3076 3076
3077 3077 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3078 3078
3079 3079 def permission_user_groups(self, with_members=False):
3080 3080 q = UserGroupRepoGroupToPerm.query()\
3081 3081 .filter(UserGroupRepoGroupToPerm.group == self)
3082 3082 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3083 3083 joinedload(UserGroupRepoGroupToPerm.users_group),
3084 3084 joinedload(UserGroupRepoGroupToPerm.permission),)
3085 3085
3086 3086 perm_rows = []
3087 3087 for _user_group in q.all():
3088 3088 entry = AttributeDict(_user_group.users_group.get_dict())
3089 3089 entry.permission = _user_group.permission.permission_name
3090 3090 if with_members:
3091 3091 entry.members = [x.user.get_dict()
3092 3092 for x in _user_group.users_group.members]
3093 3093 perm_rows.append(entry)
3094 3094
3095 3095 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3096 3096 return perm_rows
3097 3097
3098 3098 def get_api_data(self):
3099 3099 """
3100 3100 Common function for generating api data
3101 3101
3102 3102 """
3103 3103 group = self
3104 3104 data = {
3105 3105 'group_id': group.group_id,
3106 3106 'group_name': group.group_name,
3107 3107 'group_description': group.description_safe,
3108 3108 'parent_group': group.parent_group.group_name if group.parent_group else None,
3109 3109 'repositories': [x.repo_name for x in group.repositories],
3110 3110 'owner': group.user.username,
3111 3111 }
3112 3112 return data
3113 3113
3114 3114 def get_dict(self):
3115 3115 # Since we transformed `group_name` to a hybrid property, we need to
3116 3116 # keep compatibility with the code which uses `group_name` field.
3117 3117 result = super(RepoGroup, self).get_dict()
3118 3118 result['group_name'] = result.pop('_group_name', None)
3119 3119 return result
3120 3120
3121 3121
3122 3122 class Permission(Base, BaseModel):
3123 3123 __tablename__ = 'permissions'
3124 3124 __table_args__ = (
3125 3125 Index('p_perm_name_idx', 'permission_name'),
3126 3126 base_table_args,
3127 3127 )
3128 3128
3129 3129 PERMS = [
3130 3130 ('hg.admin', _('RhodeCode Super Administrator')),
3131 3131
3132 3132 ('repository.none', _('Repository no access')),
3133 3133 ('repository.read', _('Repository read access')),
3134 3134 ('repository.write', _('Repository write access')),
3135 3135 ('repository.admin', _('Repository admin access')),
3136 3136
3137 3137 ('group.none', _('Repository group no access')),
3138 3138 ('group.read', _('Repository group read access')),
3139 3139 ('group.write', _('Repository group write access')),
3140 3140 ('group.admin', _('Repository group admin access')),
3141 3141
3142 3142 ('usergroup.none', _('User group no access')),
3143 3143 ('usergroup.read', _('User group read access')),
3144 3144 ('usergroup.write', _('User group write access')),
3145 3145 ('usergroup.admin', _('User group admin access')),
3146 3146
3147 3147 ('branch.none', _('Branch no permissions')),
3148 3148 ('branch.merge', _('Branch access by web merge')),
3149 3149 ('branch.push', _('Branch access by push')),
3150 3150 ('branch.push_force', _('Branch access by push with force')),
3151 3151
3152 3152 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3153 3153 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3154 3154
3155 3155 ('hg.usergroup.create.false', _('User Group creation disabled')),
3156 3156 ('hg.usergroup.create.true', _('User Group creation enabled')),
3157 3157
3158 3158 ('hg.create.none', _('Repository creation disabled')),
3159 3159 ('hg.create.repository', _('Repository creation enabled')),
3160 3160 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3161 3161 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3162 3162
3163 3163 ('hg.fork.none', _('Repository forking disabled')),
3164 3164 ('hg.fork.repository', _('Repository forking enabled')),
3165 3165
3166 3166 ('hg.register.none', _('Registration disabled')),
3167 3167 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3168 3168 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3169 3169
3170 3170 ('hg.password_reset.enabled', _('Password reset enabled')),
3171 3171 ('hg.password_reset.hidden', _('Password reset hidden')),
3172 3172 ('hg.password_reset.disabled', _('Password reset disabled')),
3173 3173
3174 3174 ('hg.extern_activate.manual', _('Manual activation of external account')),
3175 3175 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3176 3176
3177 3177 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3178 3178 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3179 3179 ]
3180 3180
3181 3181 # definition of system default permissions for DEFAULT user, created on
3182 3182 # system setup
3183 3183 DEFAULT_USER_PERMISSIONS = [
3184 3184 # object perms
3185 3185 'repository.read',
3186 3186 'group.read',
3187 3187 'usergroup.read',
3188 3188 # branch, for backward compat we need the same value as before, so force push
3189 3189 'branch.push_force',
3190 3190 # global
3191 3191 'hg.create.repository',
3192 3192 'hg.repogroup.create.false',
3193 3193 'hg.usergroup.create.false',
3194 3194 'hg.create.write_on_repogroup.true',
3195 3195 'hg.fork.repository',
3196 3196 'hg.register.manual_activate',
3197 3197 'hg.password_reset.enabled',
3198 3198 'hg.extern_activate.auto',
3199 3199 'hg.inherit_default_perms.true',
3200 3200 ]
3201 3201
3202 3202 # Weight defines which permissions are more important.
3203 3203 # The higher the number, the more important the permission.
3205 3205 PERM_WEIGHTS = {
3206 3206 'repository.none': 0,
3207 3207 'repository.read': 1,
3208 3208 'repository.write': 3,
3209 3209 'repository.admin': 4,
3210 3210
3211 3211 'group.none': 0,
3212 3212 'group.read': 1,
3213 3213 'group.write': 3,
3214 3214 'group.admin': 4,
3215 3215
3216 3216 'usergroup.none': 0,
3217 3217 'usergroup.read': 1,
3218 3218 'usergroup.write': 3,
3219 3219 'usergroup.admin': 4,
3220 3220
3221 3221 'branch.none': 0,
3222 3222 'branch.merge': 1,
3223 3223 'branch.push': 3,
3224 3224 'branch.push_force': 4,
3225 3225
3226 3226 'hg.repogroup.create.false': 0,
3227 3227 'hg.repogroup.create.true': 1,
3228 3228
3229 3229 'hg.usergroup.create.false': 0,
3230 3230 'hg.usergroup.create.true': 1,
3231 3231
3232 3232 'hg.fork.none': 0,
3233 3233 'hg.fork.repository': 1,
3234 3234 'hg.create.none': 0,
3235 3235 'hg.create.repository': 1
3236 3236 }
3237 3237
3238 3238 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3239 3239 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3240 3240 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3241 3241
3242 3242 def __unicode__(self):
3243 3243 return u"<%s('%s:%s')>" % (
3244 3244 self.__class__.__name__, self.permission_id, self.permission_name
3245 3245 )
3246 3246
3247 3247 @classmethod
3248 3248 def get_by_key(cls, key):
3249 3249 return cls.query().filter(cls.permission_name == key).scalar()
3250 3250
3251 3251 @classmethod
3252 3252 def get_default_repo_perms(cls, user_id, repo_id=None):
3253 3253 q = Session().query(UserRepoToPerm, Repository, Permission)\
3254 3254 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3255 3255 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3256 3256 .filter(UserRepoToPerm.user_id == user_id)
3257 3257 if repo_id:
3258 3258 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3259 3259 return q.all()
3260 3260
3261 3261 @classmethod
3262 3262 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3263 3263 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3264 3264 .join(
3265 3265 Permission,
3266 3266 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3267 3267 .join(
3268 3268 UserRepoToPerm,
3269 3269 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3270 3270 .filter(UserRepoToPerm.user_id == user_id)
3271 3271
3272 3272 if repo_id:
3273 3273 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3274 3274 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3275 3275
3276 3276 @classmethod
3277 3277 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3278 3278 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3279 3279 .join(
3280 3280 Permission,
3281 3281 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3282 3282 .join(
3283 3283 Repository,
3284 3284 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3285 3285 .join(
3286 3286 UserGroup,
3287 3287 UserGroupRepoToPerm.users_group_id ==
3288 3288 UserGroup.users_group_id)\
3289 3289 .join(
3290 3290 UserGroupMember,
3291 3291 UserGroupRepoToPerm.users_group_id ==
3292 3292 UserGroupMember.users_group_id)\
3293 3293 .filter(
3294 3294 UserGroupMember.user_id == user_id,
3295 3295 UserGroup.users_group_active == true())
3296 3296 if repo_id:
3297 3297 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3298 3298 return q.all()
3299 3299
3300 3300 @classmethod
3301 3301 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3302 3302 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3303 3303 .join(
3304 3304 Permission,
3305 3305 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3306 3306 .join(
3307 3307 UserGroupRepoToPerm,
3308 3308 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3309 3309 .join(
3310 3310 UserGroup,
3311 3311 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3312 3312 .join(
3313 3313 UserGroupMember,
3314 3314 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3315 3315 .filter(
3316 3316 UserGroupMember.user_id == user_id,
3317 3317 UserGroup.users_group_active == true())
3318 3318
3319 3319 if repo_id:
3320 3320 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3321 3321 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3322 3322
3323 3323 @classmethod
3324 3324 def get_default_group_perms(cls, user_id, repo_group_id=None):
3325 3325 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3326 3326 .join(
3327 3327 Permission,
3328 3328 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3329 3329 .join(
3330 3330 RepoGroup,
3331 3331 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3332 3332 .filter(UserRepoGroupToPerm.user_id == user_id)
3333 3333 if repo_group_id:
3334 3334 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3335 3335 return q.all()
3336 3336
3337 3337 @classmethod
3338 3338 def get_default_group_perms_from_user_group(
3339 3339 cls, user_id, repo_group_id=None):
3340 3340 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3341 3341 .join(
3342 3342 Permission,
3343 3343 UserGroupRepoGroupToPerm.permission_id ==
3344 3344 Permission.permission_id)\
3345 3345 .join(
3346 3346 RepoGroup,
3347 3347 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3348 3348 .join(
3349 3349 UserGroup,
3350 3350 UserGroupRepoGroupToPerm.users_group_id ==
3351 3351 UserGroup.users_group_id)\
3352 3352 .join(
3353 3353 UserGroupMember,
3354 3354 UserGroupRepoGroupToPerm.users_group_id ==
3355 3355 UserGroupMember.users_group_id)\
3356 3356 .filter(
3357 3357 UserGroupMember.user_id == user_id,
3358 3358 UserGroup.users_group_active == true())
3359 3359 if repo_group_id:
3360 3360 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3361 3361 return q.all()
3362 3362
3363 3363 @classmethod
3364 3364 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3365 3365 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3366 3366 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3367 3367 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3368 3368 .filter(UserUserGroupToPerm.user_id == user_id)
3369 3369 if user_group_id:
3370 3370 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3371 3371 return q.all()
3372 3372
3373 3373 @classmethod
3374 3374 def get_default_user_group_perms_from_user_group(
3375 3375 cls, user_id, user_group_id=None):
3376 3376 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3377 3377 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3378 3378 .join(
3379 3379 Permission,
3380 3380 UserGroupUserGroupToPerm.permission_id ==
3381 3381 Permission.permission_id)\
3382 3382 .join(
3383 3383 TargetUserGroup,
3384 3384 UserGroupUserGroupToPerm.target_user_group_id ==
3385 3385 TargetUserGroup.users_group_id)\
3386 3386 .join(
3387 3387 UserGroup,
3388 3388 UserGroupUserGroupToPerm.user_group_id ==
3389 3389 UserGroup.users_group_id)\
3390 3390 .join(
3391 3391 UserGroupMember,
3392 3392 UserGroupUserGroupToPerm.user_group_id ==
3393 3393 UserGroupMember.users_group_id)\
3394 3394 .filter(
3395 3395 UserGroupMember.user_id == user_id,
3396 3396 UserGroup.users_group_active == true())
3397 3397 if user_group_id:
3398 3398 q = q.filter(
3399 3399 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3400 3400
3401 3401 return q.all()
3402 3402
3403 3403
3404 3404 class UserRepoToPerm(Base, BaseModel):
3405 3405 __tablename__ = 'repo_to_perm'
3406 3406 __table_args__ = (
3407 3407 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3408 3408 base_table_args
3409 3409 )
3410 3410
3411 3411 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3412 3412 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3413 3413 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3414 3414 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3415 3415
3416 3416 user = relationship('User')
3417 3417 repository = relationship('Repository')
3418 3418 permission = relationship('Permission')
3419 3419
3420 3420 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3421 3421
3422 3422 @classmethod
3423 3423 def create(cls, user, repository, permission):
3424 3424 n = cls()
3425 3425 n.user = user
3426 3426 n.repository = repository
3427 3427 n.permission = permission
3428 3428 Session().add(n)
3429 3429 return n
3430 3430
3431 3431 def __unicode__(self):
3432 3432 return u'<%s => %s >' % (self.user, self.repository)
3433 3433
3434 3434
3435 3435 class UserUserGroupToPerm(Base, BaseModel):
3436 3436 __tablename__ = 'user_user_group_to_perm'
3437 3437 __table_args__ = (
3438 3438 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3439 3439 base_table_args
3440 3440 )
3441 3441
3442 3442 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3443 3443 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3444 3444 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3445 3445 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3446 3446
3447 3447 user = relationship('User')
3448 3448 user_group = relationship('UserGroup')
3449 3449 permission = relationship('Permission')
3450 3450
3451 3451 @classmethod
3452 3452 def create(cls, user, user_group, permission):
3453 3453 n = cls()
3454 3454 n.user = user
3455 3455 n.user_group = user_group
3456 3456 n.permission = permission
3457 3457 Session().add(n)
3458 3458 return n
3459 3459
3460 3460 def __unicode__(self):
3461 3461 return u'<%s => %s >' % (self.user, self.user_group)
3462 3462
3463 3463
3464 3464 class UserToPerm(Base, BaseModel):
3465 3465 __tablename__ = 'user_to_perm'
3466 3466 __table_args__ = (
3467 3467 UniqueConstraint('user_id', 'permission_id'),
3468 3468 base_table_args
3469 3469 )
3470 3470
3471 3471 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3472 3472 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3473 3473 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3474 3474
3475 3475 user = relationship('User')
3476 3476 permission = relationship('Permission', lazy='joined')
3477 3477
3478 3478 def __unicode__(self):
3479 3479 return u'<%s => %s >' % (self.user, self.permission)
3480 3480
3481 3481
3482 3482 class UserGroupRepoToPerm(Base, BaseModel):
3483 3483 __tablename__ = 'users_group_repo_to_perm'
3484 3484 __table_args__ = (
3485 3485 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3486 3486 base_table_args
3487 3487 )
3488 3488
3489 3489 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3490 3490 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3491 3491 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3492 3492 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3493 3493
3494 3494 users_group = relationship('UserGroup')
3495 3495 permission = relationship('Permission')
3496 3496 repository = relationship('Repository')
3497 3497 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3498 3498
3499 3499 @classmethod
3500 3500 def create(cls, users_group, repository, permission):
3501 3501 n = cls()
3502 3502 n.users_group = users_group
3503 3503 n.repository = repository
3504 3504 n.permission = permission
3505 3505 Session().add(n)
3506 3506 return n
3507 3507
3508 3508 def __unicode__(self):
3509 3509 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3510 3510
3511 3511
3512 3512 class UserGroupUserGroupToPerm(Base, BaseModel):
3513 3513 __tablename__ = 'user_group_user_group_to_perm'
3514 3514 __table_args__ = (
3515 3515 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3516 3516 CheckConstraint('target_user_group_id != user_group_id'),
3517 3517 base_table_args
3518 3518 )
3519 3519
3520 3520 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3521 3521 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3522 3522 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3523 3523 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3524 3524
3525 3525 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3526 3526 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3527 3527 permission = relationship('Permission')
3528 3528
3529 3529 @classmethod
3530 3530 def create(cls, target_user_group, user_group, permission):
3531 3531 n = cls()
3532 3532 n.target_user_group = target_user_group
3533 3533 n.user_group = user_group
3534 3534 n.permission = permission
3535 3535 Session().add(n)
3536 3536 return n
3537 3537
3538 3538 def __unicode__(self):
3539 3539 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3540 3540
3541 3541
3542 3542 class UserGroupToPerm(Base, BaseModel):
3543 3543 __tablename__ = 'users_group_to_perm'
3544 3544 __table_args__ = (
3545 3545 UniqueConstraint('users_group_id', 'permission_id',),
3546 3546 base_table_args
3547 3547 )
3548 3548
3549 3549 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3550 3550 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3551 3551 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3552 3552
3553 3553 users_group = relationship('UserGroup')
3554 3554 permission = relationship('Permission')
3555 3555
3556 3556
3557 3557 class UserRepoGroupToPerm(Base, BaseModel):
3558 3558 __tablename__ = 'user_repo_group_to_perm'
3559 3559 __table_args__ = (
3560 3560 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3561 3561 base_table_args
3562 3562 )
3563 3563
3564 3564 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3565 3565 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3566 3566 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3567 3567 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3568 3568
3569 3569 user = relationship('User')
3570 3570 group = relationship('RepoGroup')
3571 3571 permission = relationship('Permission')
3572 3572
3573 3573 @classmethod
3574 3574 def create(cls, user, repository_group, permission):
3575 3575 n = cls()
3576 3576 n.user = user
3577 3577 n.group = repository_group
3578 3578 n.permission = permission
3579 3579 Session().add(n)
3580 3580 return n
3581 3581
3582 3582
3583 3583 class UserGroupRepoGroupToPerm(Base, BaseModel):
3584 3584 __tablename__ = 'users_group_repo_group_to_perm'
3585 3585 __table_args__ = (
3586 3586 UniqueConstraint('users_group_id', 'group_id'),
3587 3587 base_table_args
3588 3588 )
3589 3589
3590 3590 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3591 3591 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3592 3592 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3593 3593 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3594 3594
3595 3595 users_group = relationship('UserGroup')
3596 3596 permission = relationship('Permission')
3597 3597 group = relationship('RepoGroup')
3598 3598
3599 3599 @classmethod
3600 3600 def create(cls, user_group, repository_group, permission):
3601 3601 n = cls()
3602 3602 n.users_group = user_group
3603 3603 n.group = repository_group
3604 3604 n.permission = permission
3605 3605 Session().add(n)
3606 3606 return n
3607 3607
3608 3608 def __unicode__(self):
3609 3609 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3610 3610
3611 3611
3612 3612 class Statistics(Base, BaseModel):
3613 3613 __tablename__ = 'statistics'
3614 3614 __table_args__ = (
3615 3615 base_table_args
3616 3616 )
3617 3617
3618 3618 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3619 3619 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3620 3620 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3621 3621 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3622 3622 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3623 3623 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3624 3624
3625 3625 repository = relationship('Repository', single_parent=True)
3626 3626
3627 3627
3628 3628 class UserFollowing(Base, BaseModel):
3629 3629 __tablename__ = 'user_followings'
3630 3630 __table_args__ = (
3631 3631 UniqueConstraint('user_id', 'follows_repository_id'),
3632 3632 UniqueConstraint('user_id', 'follows_user_id'),
3633 3633 base_table_args
3634 3634 )
3635 3635
3636 3636 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3637 3637 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3638 3638 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3639 3639 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3640 3640 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3641 3641
3642 3642 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3643 3643
3644 3644 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3645 3645 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3646 3646
3647 3647 @classmethod
3648 3648 def get_repo_followers(cls, repo_id):
3649 3649 return cls.query().filter(cls.follows_repo_id == repo_id)
3650 3650
3651 3651
3652 3652 class CacheKey(Base, BaseModel):
3653 3653 __tablename__ = 'cache_invalidation'
3654 3654 __table_args__ = (
3655 3655 UniqueConstraint('cache_key'),
3656 3656 Index('key_idx', 'cache_key'),
3657 3657 base_table_args,
3658 3658 )
3659 3659
3660 3660 CACHE_TYPE_FEED = 'FEED'
3661 3661
3662 3662 # namespaces used to register process/thread aware caches
3663 3663 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3664 3664 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3665 3665
3666 3666 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3667 3667 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3668 3668 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3669 3669 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3670 3670 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3671 3671
3672 3672 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3673 3673 self.cache_key = cache_key
3674 3674 self.cache_args = cache_args
3675 3675 self.cache_active = False
3676 3676 # first key should be same for all entries, since all workers should share it
3677 3677 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3678 3678
3679 3679 def __unicode__(self):
3680 3680 return u"<%s('%s:%s[%s]')>" % (
3681 3681 self.__class__.__name__,
3682 3682 self.cache_id, self.cache_key, self.cache_active)
3683 3683
3684 3684 def _cache_key_partition(self):
3685 3685 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3686 3686 return prefix, repo_name, suffix
3687 3687
3688 3688 def get_prefix(self):
3689 3689 """
3690 3690 Try to extract prefix from existing cache key. The key could consist
3691 3691 of prefix, repo_name, suffix
3692 3692 """
3693 3693 # this returns prefix, repo_name, suffix
3694 3694 return self._cache_key_partition()[0]
3695 3695
3696 3696 def get_suffix(self):
3697 3697 """
3698 3698 get suffix that might have been used in _get_cache_key to
3699 3699 generate self.cache_key. Only used for informational purposes
3700 3700 in repo_edit.mako.
3701 3701 """
3702 3702 # prefix, repo_name, suffix
3703 3703 return self._cache_key_partition()[2]
3704 3704
3705 3705 @classmethod
3706 3706 def generate_new_state_uid(cls, based_on=None):
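# deterministic uuid5 when a seed is given (stable across workers),
# otherwise a random uuid4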
3707 3707 if based_on:
3708 3708 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3709 3709 else:
3710 3710 return str(uuid.uuid4())
3711 3711
3712 3712 @classmethod
3713 3713 def delete_all_cache(cls):
3714 3714 """
3715 3715 Delete all cache keys from database.
3716 3716 Should only be run when all instances are down and all entries
3717 3717 thus stale.
3718 3718 """
3719 3719 cls.query().delete()
3720 3720 Session().commit()
3721 3721
3722 3722 @classmethod
3723 3723 def set_invalidate(cls, cache_uid, delete=False):
3724 3724 """
3725 3725 Mark all caches of a repo as invalid in the database.
3726 3726 """
3727 3727
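# with delete=True the matching rows are removed entirely; otherwise they are
# deactivated and receive a fresh state uid so the cache is seen as invalid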
3728 3728 try:
3729 3729 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3730 3730 if delete:
3731 3731 qry.delete()
3732 3732 log.debug('cache objects deleted for cache args %s',
3733 3733 safe_str(cache_uid))
3734 3734 else:
3735 3735 qry.update({"cache_active": False,
3736 3736 "cache_state_uid": cls.generate_new_state_uid()})
3737 3737 log.debug('cache objects marked as invalid for cache args %s',
3738 3738 safe_str(cache_uid))
3739 3739
3740 3740 Session().commit()
3741 3741 except Exception:
3742 3742 log.exception(
3743 3743 'Cache key invalidation failed for cache args %s',
3744 3744 safe_str(cache_uid))
3745 3745 Session().rollback()
3746 3746
3747 3747 @classmethod
3748 3748 def get_active_cache(cls, cache_key):
3749 3749 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3750 3750 if inv_obj:
3751 3751 return inv_obj
3752 3752 return None
3753 3753
3754 3754 @classmethod
3755 3755 def get_namespace_map(cls, namespace):
3756 3756 return {
3757 3757 x.cache_key: x
3758 3758 for x in cls.query().filter(cls.cache_args == namespace)}
3759 3759
3760 3760
3761 3761 class ChangesetComment(Base, BaseModel):
3762 3762 __tablename__ = 'changeset_comments'
3763 3763 __table_args__ = (
3764 3764 Index('cc_revision_idx', 'revision'),
3765 3765 base_table_args,
3766 3766 )
3767 3767
3768 3768 COMMENT_OUTDATED = u'comment_outdated'
3769 3769 COMMENT_TYPE_NOTE = u'note'
3770 3770 COMMENT_TYPE_TODO = u'todo'
3771 3771 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3772 3772
3773 3773 OP_IMMUTABLE = u'immutable'
3774 3774 OP_CHANGEABLE = u'changeable'
3775 3775
3776 3776 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3777 3777 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3778 3778 revision = Column('revision', String(40), nullable=True)
3779 3779 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3780 3780 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3781 3781 line_no = Column('line_no', Unicode(10), nullable=True)
3782 3782 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3783 3783 f_path = Column('f_path', Unicode(1000), nullable=True)
3784 3784 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3785 3785 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3786 3786 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3787 3787 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3788 3788 renderer = Column('renderer', Unicode(64), nullable=True)
3789 3789 display_state = Column('display_state', Unicode(128), nullable=True)
3790 3790 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3791 3791 draft = Column('draft', Boolean(), nullable=True, default=False)
3792 3792
3793 3793 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3794 3794 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3795 3795
3796 3796 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3797 3797 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3798 3798
3799 3799 author = relationship('User', lazy='select')
3800 3800 repo = relationship('Repository')
3801 3801 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
3802 3802 pull_request = relationship('PullRequest', lazy='select')
3803 3803 pull_request_version = relationship('PullRequestVersion', lazy='select')
3804 3804 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')
3805 3805
3806 3806 @classmethod
3807 3807 def get_users(cls, revision=None, pull_request_id=None):
3808 3808 """
3809 3809 Returns users associated with this ChangesetComment, i.e. those
3810 3810 who actually commented
3811 3811
3812 3812 :param cls:
3813 3813 :param revision:
3814 3814 """
3815 3815 q = Session().query(User)\
3816 3816 .join(ChangesetComment.author)
3817 3817 if revision:
3818 3818 q = q.filter(cls.revision == revision)
3819 3819 elif pull_request_id:
3820 3820 q = q.filter(cls.pull_request_id == pull_request_id)
3821 3821 return q.all()
3822 3822
3823 3823 @classmethod
3824 def get_index_from_version(cls, pr_version, versions):
3825 num_versions = [x.pull_request_version_id for x in versions]
3824 def get_index_from_version(cls, pr_version, versions=None, num_versions=None):
3825
3826 if versions is not None:
3827 num_versions = [x.pull_request_version_id for x in versions]
3828
3829 num_versions = num_versions or []
3826 3830 try:
3827 3831 return num_versions.index(pr_version) + 1
3828 3832 except (IndexError, ValueError):
3829 3833 return
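    # Illustrative note (not part of the model): with the extended signature the
    # 1-based index can be resolved either from ORM version objects or from a
    # plain list of version ids, e.g. (values are examples only):
    #   ChangesetComment.get_index_from_version(2, num_versions=[1, 2, 3])  # -> 2
    #   ChangesetComment.get_index_from_version(2, versions=pull_request.versions)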
3830 3834
3831 3835 @property
3832 3836 def outdated(self):
3833 3837 return self.display_state == self.COMMENT_OUTDATED
3834 3838
3835 3839 @property
3836 3840 def outdated_js(self):
3837 3841 return json.dumps(self.display_state == self.COMMENT_OUTDATED)
3838 3842
3839 3843 @property
3840 3844 def immutable(self):
3841 3845 return self.immutable_state == self.OP_IMMUTABLE
3842 3846
3843 3847 def outdated_at_version(self, version):
3844 3848 """
3845 3849 Checks if comment is outdated for given pull request version
3846 3850 """
3847 3851 def version_check():
3848 3852 return self.pull_request_version_id and self.pull_request_version_id != version
3849 3853
3850 3854 if self.is_inline:
3851 3855 return self.outdated and version_check()
3852 3856 else:
3853 3857 # general comments don't have .outdated set; also, comments on the latest version don't have a version set
3854 3858 return version_check()
3855 3859
3856 3860 def outdated_at_version_js(self, version):
3857 3861 """
3858 3862 Checks if comment is outdated for given pull request version
3859 3863 """
3860 3864 return json.dumps(self.outdated_at_version(version))
3861 3865
3862 3866 def older_than_version(self, version):
3863 3867 """
3864 3868 Checks if the comment was made against a version older than the given one
3865 3869 """
3866 3870 if version is None:
3867 3871 return self.pull_request_version != version
3868 3872
3869 3873 return self.pull_request_version < version
3870 3874
3871 3875 def older_than_version_js(self, version):
3872 3876 """
3873 3877 Checks if the comment was made against a version older than the given one
3874 3878 """
3875 3879 return json.dumps(self.older_than_version(version))
3876 3880
3877 3881 @property
3878 3882 def commit_id(self):
3879 3883 """New style naming to stop using .revision"""
3880 3884 return self.revision
3881 3885
3882 3886 @property
3883 3887 def resolved(self):
3884 3888 return self.resolved_by[0] if self.resolved_by else None
3885 3889
3886 3890 @property
3887 3891 def is_todo(self):
3888 3892 return self.comment_type == self.COMMENT_TYPE_TODO
3889 3893
3890 3894 @property
3891 3895 def is_inline(self):
3892 3896 if self.line_no and self.f_path:
3893 3897 return True
3894 3898 return False
3895 3899
3896 3900 @property
3897 3901 def last_version(self):
3898 3902 version = 0
3899 3903 if self.history:
3900 3904 version = self.history[-1].version
3901 3905 return version
3902 3906
3903 3907 def get_index_version(self, versions):
3904 3908 return self.get_index_from_version(
3905 3909 self.pull_request_version_id, versions)
3906 3910
3907 3911 @property
3908 3912 def review_status(self):
3909 3913 if self.status_change:
3910 3914 return self.status_change[0].status
3911 3915
3912 3916 @property
3913 3917 def review_status_lbl(self):
3914 3918 if self.status_change:
3915 3919 return self.status_change[0].status_lbl
3916 3920
3917 3921 def __repr__(self):
3918 3922 if self.comment_id:
3919 3923 return '<DB:Comment #%s>' % self.comment_id
3920 3924 else:
3921 3925 return '<DB:Comment at %#x>' % id(self)
3922 3926
3923 3927 def get_api_data(self):
3924 3928 comment = self
3925 3929
3926 3930 data = {
3927 3931 'comment_id': comment.comment_id,
3928 3932 'comment_type': comment.comment_type,
3929 3933 'comment_text': comment.text,
3930 3934 'comment_status': comment.status_change,
3931 3935 'comment_f_path': comment.f_path,
3932 3936 'comment_lineno': comment.line_no,
3933 3937 'comment_author': comment.author,
3934 3938 'comment_created_on': comment.created_on,
3935 3939 'comment_resolved_by': self.resolved,
3936 3940 'comment_commit_id': comment.revision,
3937 3941 'comment_pull_request_id': comment.pull_request_id,
3938 3942 'comment_last_version': self.last_version
3939 3943 }
3940 3944 return data
3941 3945
3942 3946 def __json__(self):
3943 3947 data = dict()
3944 3948 data.update(self.get_api_data())
3945 3949 return data
3946 3950
3947 3951
3948 3952 class ChangesetCommentHistory(Base, BaseModel):
3949 3953 __tablename__ = 'changeset_comments_history'
3950 3954 __table_args__ = (
3951 3955 Index('cch_comment_id_idx', 'comment_id'),
3952 3956 base_table_args,
3953 3957 )
3954 3958
3955 3959 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3956 3960 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3957 3961 version = Column("version", Integer(), nullable=False, default=0)
3958 3962 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3959 3963 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3960 3964 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3961 3965 deleted = Column('deleted', Boolean(), default=False)
3962 3966
3963 3967 author = relationship('User', lazy='joined')
3964 3968 comment = relationship('ChangesetComment', cascade="all, delete")
3965 3969
3966 3970 @classmethod
3967 3971 def get_version(cls, comment_id):
3968 3972 q = Session().query(ChangesetCommentHistory).filter(
3969 3973 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
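        # descriptive note: the next history version starts at 1 and otherwise
        # advances past the larger of the stored row count and the last saved version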
3970 3974 if q.count() == 0:
3971 3975 return 1
3972 3976 elif q.count() >= q[0].version:
3973 3977 return q.count() + 1
3974 3978 else:
3975 3979 return q[0].version + 1
3976 3980
3977 3981
3978 3982 class ChangesetStatus(Base, BaseModel):
3979 3983 __tablename__ = 'changeset_statuses'
3980 3984 __table_args__ = (
3981 3985 Index('cs_revision_idx', 'revision'),
3982 3986 Index('cs_version_idx', 'version'),
3983 3987 UniqueConstraint('repo_id', 'revision', 'version'),
3984 3988 base_table_args
3985 3989 )
3986 3990
3987 3991 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3988 3992 STATUS_APPROVED = 'approved'
3989 3993 STATUS_REJECTED = 'rejected'
3990 3994 STATUS_UNDER_REVIEW = 'under_review'
3991 3995
3992 3996 STATUSES = [
3993 3997 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3994 3998 (STATUS_APPROVED, _("Approved")),
3995 3999 (STATUS_REJECTED, _("Rejected")),
3996 4000 (STATUS_UNDER_REVIEW, _("Under Review")),
3997 4001 ]
3998 4002
3999 4003 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4000 4004 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4001 4005 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4002 4006 revision = Column('revision', String(40), nullable=False)
4003 4007 status = Column('status', String(128), nullable=False, default=DEFAULT)
4004 4008 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4005 4009 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4006 4010 version = Column('version', Integer(), nullable=False, default=0)
4007 4011 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4008 4012
4009 4013 author = relationship('User', lazy='select')
4010 4014 repo = relationship('Repository', lazy='select')
4011 4015 comment = relationship('ChangesetComment', lazy='select')
4012 4016 pull_request = relationship('PullRequest', lazy='select')
4013 4017
4014 4018 def __unicode__(self):
4015 4019 return u"<%s('%s[v%s]:%s')>" % (
4016 4020 self.__class__.__name__,
4017 4021 self.status, self.version, self.author
4018 4022 )
4019 4023
4020 4024 @classmethod
4021 4025 def get_status_lbl(cls, value):
4022 4026 return dict(cls.STATUSES).get(value)
4023 4027
4024 4028 @property
4025 4029 def status_lbl(self):
4026 4030 return ChangesetStatus.get_status_lbl(self.status)
4027 4031
4028 4032 def get_api_data(self):
4029 4033 status = self
4030 4034 data = {
4031 4035 'status_id': status.changeset_status_id,
4032 4036 'status': status.status,
4033 4037 }
4034 4038 return data
4035 4039
4036 4040 def __json__(self):
4037 4041 data = dict()
4038 4042 data.update(self.get_api_data())
4039 4043 return data
4040 4044
4041 4045
4042 4046 class _SetState(object):
4043 4047 """
4044 4048 Context manager allowing changing the state for sensitive operations such as
4045 4049 pull request update or merge
4046 4050 """
4047 4051
4048 4052 def __init__(self, pull_request, pr_state, back_state=None):
4049 4053 self._pr = pull_request
4050 4054 self._org_state = back_state or pull_request.pull_request_state
4051 4055 self._pr_state = pr_state
4052 4056 self._current_state = None
4053 4057
4054 4058 def __enter__(self):
4055 4059 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4056 4060 self._pr, self._pr_state)
4057 4061 self.set_pr_state(self._pr_state)
4058 4062 return self
4059 4063
4060 4064 def __exit__(self, exc_type, exc_val, exc_tb):
4061 4065 if exc_val is not None:
4062 4066 log.error(traceback.format_exc())
4063 4067 return None
4064 4068
4065 4069 self.set_pr_state(self._org_state)
4066 4070 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4067 4071 self._pr, self._org_state)
4068 4072
4069 4073 @property
4070 4074 def state(self):
4071 4075 return self._current_state
4072 4076
4073 4077 def set_pr_state(self, pr_state):
4074 4078 try:
4075 4079 self._pr.pull_request_state = pr_state
4076 4080 Session().add(self._pr)
4077 4081 Session().commit()
4078 4082 self._current_state = pr_state
4079 4083 except Exception:
4080 4084 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4081 4085 raise
4082 4086
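# Usage sketch (illustrative only): this context manager is normally obtained via
# PullRequest.set_state(), e.g.:
#
#     with pull_request.set_state(PullRequest.STATE_UPDATING):
#         pull_request.merge()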
4083 4087
4084 4088 class _PullRequestBase(BaseModel):
4085 4089 """
4086 4090 Common attributes of pull request and version entries.
4087 4091 """
4088 4092
4089 4093 # .status values
4090 4094 STATUS_NEW = u'new'
4091 4095 STATUS_OPEN = u'open'
4092 4096 STATUS_CLOSED = u'closed'
4093 4097
4094 4098 # available states
4095 4099 STATE_CREATING = u'creating'
4096 4100 STATE_UPDATING = u'updating'
4097 4101 STATE_MERGING = u'merging'
4098 4102 STATE_CREATED = u'created'
4099 4103
4100 4104 title = Column('title', Unicode(255), nullable=True)
4101 4105 description = Column(
4102 4106 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4103 4107 nullable=True)
4104 4108 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4105 4109
4106 4110 # new/open/closed status of pull request (not approve/reject/etc)
4107 4111 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4108 4112 created_on = Column(
4109 4113 'created_on', DateTime(timezone=False), nullable=False,
4110 4114 default=datetime.datetime.now)
4111 4115 updated_on = Column(
4112 4116 'updated_on', DateTime(timezone=False), nullable=False,
4113 4117 default=datetime.datetime.now)
4114 4118
4115 4119 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4116 4120
4117 4121 @declared_attr
4118 4122 def user_id(cls):
4119 4123 return Column(
4120 4124 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4121 4125 unique=None)
4122 4126
4123 4127 # 500 revisions max
4124 4128 _revisions = Column(
4125 4129 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4126 4130
4127 4131 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4128 4132
4129 4133 @declared_attr
4130 4134 def source_repo_id(cls):
4131 4135 # TODO: dan: rename column to source_repo_id
4132 4136 return Column(
4133 4137 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4134 4138 nullable=False)
4135 4139
4136 4140 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4137 4141
4138 4142 @hybrid_property
4139 4143 def source_ref(self):
4140 4144 return self._source_ref
4141 4145
4142 4146 @source_ref.setter
4143 4147 def source_ref(self, val):
4144 4148 parts = (val or '').split(':')
4145 4149 if len(parts) != 3:
4146 4150 raise ValueError(
4147 4151 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4148 4152 self._source_ref = safe_unicode(val)
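    # Illustrative note: refs are stored as <type>:<name>:<commit_id>, e.g. a value
    # such as u'branch:default:<40-char sha>' would pass the check above; the
    # concrete example is an assumption, not data taken from this repository.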
4149 4153
4150 4154 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4151 4155
4152 4156 @hybrid_property
4153 4157 def target_ref(self):
4154 4158 return self._target_ref
4155 4159
4156 4160 @target_ref.setter
4157 4161 def target_ref(self, val):
4158 4162 parts = (val or '').split(':')
4159 4163 if len(parts) != 3:
4160 4164 raise ValueError(
4161 4165 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4162 4166 self._target_ref = safe_unicode(val)
4163 4167
4164 4168 @declared_attr
4165 4169 def target_repo_id(cls):
4166 4170 # TODO: dan: rename column to target_repo_id
4167 4171 return Column(
4168 4172 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4169 4173 nullable=False)
4170 4174
4171 4175 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4172 4176
4173 4177 # TODO: dan: rename column to last_merge_source_rev
4174 4178 _last_merge_source_rev = Column(
4175 4179 'last_merge_org_rev', String(40), nullable=True)
4176 4180 # TODO: dan: rename column to last_merge_target_rev
4177 4181 _last_merge_target_rev = Column(
4178 4182 'last_merge_other_rev', String(40), nullable=True)
4179 4183 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4180 4184 last_merge_metadata = Column(
4181 4185 'last_merge_metadata', MutationObj.as_mutable(
4182 4186 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4183 4187
4184 4188 merge_rev = Column('merge_rev', String(40), nullable=True)
4185 4189
4186 4190 reviewer_data = Column(
4187 4191 'reviewer_data_json', MutationObj.as_mutable(
4188 4192 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4189 4193
4190 4194 @property
4191 4195 def reviewer_data_json(self):
4192 4196 return json.dumps(self.reviewer_data)
4193 4197
4194 4198 @property
4195 4199 def last_merge_metadata_parsed(self):
4196 4200 metadata = {}
4197 4201 if not self.last_merge_metadata:
4198 4202 return metadata
4199 4203
4200 4204 if hasattr(self.last_merge_metadata, 'de_coerce'):
4201 4205 for k, v in self.last_merge_metadata.de_coerce().items():
4202 4206 if k in ['target_ref', 'source_ref']:
4203 4207 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4204 4208 else:
4205 4209 if hasattr(v, 'de_coerce'):
4206 4210 metadata[k] = v.de_coerce()
4207 4211 else:
4208 4212 metadata[k] = v
4209 4213 return metadata
4210 4214
4211 4215 @property
4212 4216 def work_in_progress(self):
4213 4217 """checks if pull request is work in progress by checking the title"""
4214 4218 title = self.title.upper()
4215 4219 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4216 4220 return True
4217 4221 return False
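    # e.g. (illustrative) titles like u'[WIP] fix', u'WIP: fix' or u'WIP fix'
    # all match the pattern above and mark the pull request as work in progress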
4218 4222
4219 4223 @hybrid_property
4220 4224 def description_safe(self):
4221 4225 from rhodecode.lib import helpers as h
4222 4226 return h.escape(self.description)
4223 4227
4224 4228 @hybrid_property
4225 4229 def revisions(self):
4226 4230 return self._revisions.split(':') if self._revisions else []
4227 4231
4228 4232 @revisions.setter
4229 4233 def revisions(self, val):
4230 4234 self._revisions = u':'.join(val)
4231 4235
4232 4236 @hybrid_property
4233 4237 def last_merge_status(self):
4234 4238 return safe_int(self._last_merge_status)
4235 4239
4236 4240 @last_merge_status.setter
4237 4241 def last_merge_status(self, val):
4238 4242 self._last_merge_status = val
4239 4243
4240 4244 @declared_attr
4241 4245 def author(cls):
4242 4246 return relationship('User', lazy='joined')
4243 4247
4244 4248 @declared_attr
4245 4249 def source_repo(cls):
4246 4250 return relationship(
4247 4251 'Repository',
4248 4252 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4249 4253
4250 4254 @property
4251 4255 def source_ref_parts(self):
4252 4256 return self.unicode_to_reference(self.source_ref)
4253 4257
4254 4258 @declared_attr
4255 4259 def target_repo(cls):
4256 4260 return relationship(
4257 4261 'Repository',
4258 4262 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4259 4263
4260 4264 @property
4261 4265 def target_ref_parts(self):
4262 4266 return self.unicode_to_reference(self.target_ref)
4263 4267
4264 4268 @property
4265 4269 def shadow_merge_ref(self):
4266 4270 return self.unicode_to_reference(self._shadow_merge_ref)
4267 4271
4268 4272 @shadow_merge_ref.setter
4269 4273 def shadow_merge_ref(self, ref):
4270 4274 self._shadow_merge_ref = self.reference_to_unicode(ref)
4271 4275
4272 4276 @staticmethod
4273 4277 def unicode_to_reference(raw):
4274 4278 return unicode_to_reference(raw)
4275 4279
4276 4280 @staticmethod
4277 4281 def reference_to_unicode(ref):
4278 4282 return reference_to_unicode(ref)
4279 4283
4280 4284 def get_api_data(self, with_merge_state=True):
4281 4285 from rhodecode.model.pull_request import PullRequestModel
4282 4286
4283 4287 pull_request = self
4284 4288 if with_merge_state:
4285 4289 merge_response, merge_status, msg = \
4286 4290 PullRequestModel().merge_status(pull_request)
4287 4291 merge_state = {
4288 4292 'status': merge_status,
4289 4293 'message': safe_unicode(msg),
4290 4294 }
4291 4295 else:
4292 4296 merge_state = {'status': 'not_available',
4293 4297 'message': 'not_available'}
4294 4298
4295 4299 merge_data = {
4296 4300 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4297 4301 'reference': (
4298 4302 pull_request.shadow_merge_ref._asdict()
4299 4303 if pull_request.shadow_merge_ref else None),
4300 4304 }
4301 4305
4302 4306 data = {
4303 4307 'pull_request_id': pull_request.pull_request_id,
4304 4308 'url': PullRequestModel().get_url(pull_request),
4305 4309 'title': pull_request.title,
4306 4310 'description': pull_request.description,
4307 4311 'status': pull_request.status,
4308 4312 'state': pull_request.pull_request_state,
4309 4313 'created_on': pull_request.created_on,
4310 4314 'updated_on': pull_request.updated_on,
4311 4315 'commit_ids': pull_request.revisions,
4312 4316 'review_status': pull_request.calculated_review_status(),
4313 4317 'mergeable': merge_state,
4314 4318 'source': {
4315 4319 'clone_url': pull_request.source_repo.clone_url(),
4316 4320 'repository': pull_request.source_repo.repo_name,
4317 4321 'reference': {
4318 4322 'name': pull_request.source_ref_parts.name,
4319 4323 'type': pull_request.source_ref_parts.type,
4320 4324 'commit_id': pull_request.source_ref_parts.commit_id,
4321 4325 },
4322 4326 },
4323 4327 'target': {
4324 4328 'clone_url': pull_request.target_repo.clone_url(),
4325 4329 'repository': pull_request.target_repo.repo_name,
4326 4330 'reference': {
4327 4331 'name': pull_request.target_ref_parts.name,
4328 4332 'type': pull_request.target_ref_parts.type,
4329 4333 'commit_id': pull_request.target_ref_parts.commit_id,
4330 4334 },
4331 4335 },
4332 4336 'merge': merge_data,
4333 4337 'author': pull_request.author.get_api_data(include_secrets=False,
4334 4338 details='basic'),
4335 4339 'reviewers': [
4336 4340 {
4337 4341 'user': reviewer.get_api_data(include_secrets=False,
4338 4342 details='basic'),
4339 4343 'reasons': reasons,
4340 4344 'review_status': st[0][1].status if st else 'not_reviewed',
4341 4345 }
4342 4346 for obj, reviewer, reasons, mandatory, st in
4343 4347 pull_request.reviewers_statuses()
4344 4348 ]
4345 4349 }
4346 4350
4347 4351 return data
4348 4352
4349 4353 def set_state(self, pull_request_state, final_state=None):
4350 4354 """
4351 4355 # goes from initial state to updating to initial state.
4352 4356 # initial state can be changed by specifying back_state=
4353 4357 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4354 4358 pull_request.merge()
4355 4359
4356 4360 :param pull_request_state:
4357 4361 :param final_state:
4358 4362
4359 4363 """
4360 4364
4361 4365 return _SetState(self, pull_request_state, back_state=final_state)
4362 4366
4363 4367
4364 4368 class PullRequest(Base, _PullRequestBase):
4365 4369 __tablename__ = 'pull_requests'
4366 4370 __table_args__ = (
4367 4371 base_table_args,
4368 4372 )
4369 4373 LATEST_VER = 'latest'
4370 4374
4371 4375 pull_request_id = Column(
4372 4376 'pull_request_id', Integer(), nullable=False, primary_key=True)
4373 4377
4374 4378 def __repr__(self):
4375 4379 if self.pull_request_id:
4376 4380 return '<DB:PullRequest #%s>' % self.pull_request_id
4377 4381 else:
4378 4382 return '<DB:PullRequest at %#x>' % id(self)
4379 4383
4380 4384 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4381 4385 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4382 4386 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4383 4387 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4384 4388 lazy='dynamic')
4385 4389
4386 4390 @classmethod
4387 4391 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4388 4392 internal_methods=None):
4389 4393
4390 4394 class PullRequestDisplay(object):
4391 4395 """
4392 4396 Special object wrapper for showing PullRequest data via Versions.
4393 4397 It mimics the PR object as closely as possible. This is a read-only
4394 4398 object intended for display only
4395 4399 """
4396 4400
4397 4401 def __init__(self, attrs, internal=None):
4398 4402 self.attrs = attrs
4399 4403 # internal entries take priority over the ones given via attrs
4400 4404 self.internal = internal or ['versions']
4401 4405
4402 4406 def __getattr__(self, item):
4403 4407 if item in self.internal:
4404 4408 return getattr(self, item)
4405 4409 try:
4406 4410 return self.attrs[item]
4407 4411 except KeyError:
4408 4412 raise AttributeError(
4409 4413 '%s object has no attribute %s' % (self, item))
4410 4414
4411 4415 def __repr__(self):
4412 4416 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4413 4417
4414 4418 def versions(self):
4415 4419 return pull_request_obj.versions.order_by(
4416 4420 PullRequestVersion.pull_request_version_id).all()
4417 4421
4418 4422 def is_closed(self):
4419 4423 return pull_request_obj.is_closed()
4420 4424
4421 4425 def is_state_changing(self):
4422 4426 return pull_request_obj.is_state_changing()
4423 4427
4424 4428 @property
4425 4429 def pull_request_version_id(self):
4426 4430 return getattr(pull_request_obj, 'pull_request_version_id', None)
4427 4431
4428 4432 @property
4429 4433 def pull_request_last_version(self):
4430 4434 return pull_request_obj.pull_request_last_version
4431 4435
4432 4436 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4433 4437
4434 4438 attrs.author = StrictAttributeDict(
4435 4439 pull_request_obj.author.get_api_data())
4436 4440 if pull_request_obj.target_repo:
4437 4441 attrs.target_repo = StrictAttributeDict(
4438 4442 pull_request_obj.target_repo.get_api_data())
4439 4443 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4440 4444
4441 4445 if pull_request_obj.source_repo:
4442 4446 attrs.source_repo = StrictAttributeDict(
4443 4447 pull_request_obj.source_repo.get_api_data())
4444 4448 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4445 4449
4446 4450 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4447 4451 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4448 4452 attrs.revisions = pull_request_obj.revisions
4449 4453 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4450 4454 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4451 4455 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4452 4456 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4453 4457
4454 4458 return PullRequestDisplay(attrs, internal=internal_methods)
4455 4459
4456 4460 def is_closed(self):
4457 4461 return self.status == self.STATUS_CLOSED
4458 4462
4459 4463 def is_state_changing(self):
4460 4464 return self.pull_request_state != PullRequest.STATE_CREATED
4461 4465
4462 4466 def __json__(self):
4463 4467 return {
4464 4468 'revisions': self.revisions,
4465 4469 'versions': self.versions_count
4466 4470 }
4467 4471
4468 4472 def calculated_review_status(self):
4469 4473 from rhodecode.model.changeset_status import ChangesetStatusModel
4470 4474 return ChangesetStatusModel().calculated_review_status(self)
4471 4475
4472 4476 def reviewers_statuses(self):
4473 4477 from rhodecode.model.changeset_status import ChangesetStatusModel
4474 4478 return ChangesetStatusModel().reviewers_statuses(self)
4475 4479
4476 4480 def get_pull_request_reviewers(self, role=None):
4477 4481 qry = PullRequestReviewers.query()\
4478 4482 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4479 4483 if role:
4480 4484 qry = qry.filter(PullRequestReviewers.role == role)
4481 4485
4482 4486 return qry.all()
4483 4487
4484 4488 @property
4485 4489 def reviewers_count(self):
4486 4490 qry = PullRequestReviewers.query()\
4487 4491 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4488 4492 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4489 4493 return qry.count()
4490 4494
4491 4495 @property
4492 4496 def observers_count(self):
4493 4497 qry = PullRequestReviewers.query()\
4494 4498 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4495 4499 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4496 4500 return qry.count()
4497 4501
4498 4502 def observers(self):
4499 4503 qry = PullRequestReviewers.query()\
4500 4504 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4501 4505 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4502 4506 .all()
4503 4507
4504 4508 for entry in qry:
4505 4509 yield entry, entry.user
4506 4510
4507 4511 @property
4508 4512 def workspace_id(self):
4509 4513 from rhodecode.model.pull_request import PullRequestModel
4510 4514 return PullRequestModel()._workspace_id(self)
4511 4515
4512 4516 def get_shadow_repo(self):
4513 4517 workspace_id = self.workspace_id
4514 4518 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4515 4519 if os.path.isdir(shadow_repository_path):
4516 4520 vcs_obj = self.target_repo.scm_instance()
4517 4521 return vcs_obj.get_shadow_instance(shadow_repository_path)
4518 4522
4519 4523 @property
4520 4524 def versions_count(self):
4521 4525 """
4522 4526 return the number of versions this PR has, e.g. a PR that has been
4523 4527 updated once will have 2 versions
4524 4528 """
4525 4529 return self.versions.count() + 1
4526 4530
4527 4531 @property
4528 4532 def pull_request_last_version(self):
4529 4533 return self.versions_count
4530 4534
4531 4535
4532 4536 class PullRequestVersion(Base, _PullRequestBase):
4533 4537 __tablename__ = 'pull_request_versions'
4534 4538 __table_args__ = (
4535 4539 base_table_args,
4536 4540 )
4537 4541
4538 4542 pull_request_version_id = Column(
4539 4543 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4540 4544 pull_request_id = Column(
4541 4545 'pull_request_id', Integer(),
4542 4546 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4543 4547 pull_request = relationship('PullRequest')
4544 4548
4545 4549 def __repr__(self):
4546 4550 if self.pull_request_version_id:
4547 4551 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4548 4552 else:
4549 4553 return '<DB:PullRequestVersion at %#x>' % id(self)
4550 4554
4551 4555 @property
4552 4556 def reviewers(self):
4553 4557 return self.pull_request.reviewers
4557 4561
4558 4562 @property
4559 4563 def versions(self):
4560 4564 return self.pull_request.versions
4561 4565
4562 4566 def is_closed(self):
4563 4567 # calculate from original
4564 4568 return self.pull_request.status == self.STATUS_CLOSED
4565 4569
4566 4570 def is_state_changing(self):
4567 4571 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4568 4572
4569 4573 def calculated_review_status(self):
4570 4574 return self.pull_request.calculated_review_status()
4571 4575
4572 4576 def reviewers_statuses(self):
4573 4577 return self.pull_request.reviewers_statuses()
4574 4578
4575 4579 def observers(self):
4576 4580 return self.pull_request.observers()
4577 4581
4578 4582
4579 4583 class PullRequestReviewers(Base, BaseModel):
4580 4584 __tablename__ = 'pull_request_reviewers'
4581 4585 __table_args__ = (
4582 4586 base_table_args,
4583 4587 )
4584 4588 ROLE_REVIEWER = u'reviewer'
4585 4589 ROLE_OBSERVER = u'observer'
4586 4590 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4587 4591
4588 4592 @hybrid_property
4589 4593 def reasons(self):
4590 4594 if not self._reasons:
4591 4595 return []
4592 4596 return self._reasons
4593 4597
4594 4598 @reasons.setter
4595 4599 def reasons(self, val):
4596 4600 val = val or []
4597 4601 if any(not isinstance(x, compat.string_types) for x in val):
4598 4602 raise Exception('invalid reasons type, must be list of strings')
4599 4603 self._reasons = val
4600 4604
4601 4605 pull_requests_reviewers_id = Column(
4602 4606 'pull_requests_reviewers_id', Integer(), nullable=False,
4603 4607 primary_key=True)
4604 4608 pull_request_id = Column(
4605 4609 "pull_request_id", Integer(),
4606 4610 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4607 4611 user_id = Column(
4608 4612 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4609 4613 _reasons = Column(
4610 4614 'reason', MutationList.as_mutable(
4611 4615 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4612 4616
4613 4617 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4614 4618 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4615 4619
4616 4620 user = relationship('User')
4617 4621 pull_request = relationship('PullRequest')
4618 4622
4619 4623 rule_data = Column(
4620 4624 'rule_data_json',
4621 4625 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4622 4626
4623 4627 def rule_user_group_data(self):
4624 4628 """
4625 4629 Returns the voting user group rule data for this reviewer
4626 4630 """
4627 4631
4628 4632 if self.rule_data and 'vote_rule' in self.rule_data:
4629 4633 user_group_data = {}
4630 4634 if 'rule_user_group_entry_id' in self.rule_data:
4631 4635 # means a group with voting rules !
4632 4636 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4633 4637 user_group_data['name'] = self.rule_data['rule_name']
4634 4638 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4635 4639
4636 4640 return user_group_data
4637 4641
4638 4642 @classmethod
4639 4643 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4640 4644 qry = PullRequestReviewers.query()\
4641 4645 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4642 4646 if role:
4643 4647 qry = qry.filter(PullRequestReviewers.role == role)
4644 4648
4645 4649 return qry.all()
4646 4650
4647 4651 def __unicode__(self):
4648 4652 return u"<%s('id:%s')>" % (self.__class__.__name__,
4649 4653 self.pull_requests_reviewers_id)
4650 4654
4651 4655
4652 4656 class Notification(Base, BaseModel):
4653 4657 __tablename__ = 'notifications'
4654 4658 __table_args__ = (
4655 4659 Index('notification_type_idx', 'type'),
4656 4660 base_table_args,
4657 4661 )
4658 4662
4659 4663 TYPE_CHANGESET_COMMENT = u'cs_comment'
4660 4664 TYPE_MESSAGE = u'message'
4661 4665 TYPE_MENTION = u'mention'
4662 4666 TYPE_REGISTRATION = u'registration'
4663 4667 TYPE_PULL_REQUEST = u'pull_request'
4664 4668 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4665 4669 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4666 4670
4667 4671 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4668 4672 subject = Column('subject', Unicode(512), nullable=True)
4669 4673 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4670 4674 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4671 4675 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4672 4676 type_ = Column('type', Unicode(255))
4673 4677
4674 4678 created_by_user = relationship('User')
4675 4679 notifications_to_users = relationship('UserNotification', lazy='joined',
4676 4680 cascade="all, delete-orphan")
4677 4681
4678 4682 @property
4679 4683 def recipients(self):
4680 4684 return [x.user for x in UserNotification.query()\
4681 4685 .filter(UserNotification.notification == self)\
4682 4686 .order_by(UserNotification.user_id.asc()).all()]
4683 4687
4684 4688 @classmethod
4685 4689 def create(cls, created_by, subject, body, recipients, type_=None):
4686 4690 if type_ is None:
4687 4691 type_ = Notification.TYPE_MESSAGE
4688 4692
4689 4693 notification = cls()
4690 4694 notification.created_by_user = created_by
4691 4695 notification.subject = subject
4692 4696 notification.body = body
4693 4697 notification.type_ = type_
4694 4698 notification.created_on = datetime.datetime.now()
4695 4699
4696 4700 # For each recipient link the created notification to his account
4697 4701 for u in recipients:
4698 4702 assoc = UserNotification()
4699 4703 assoc.user_id = u.user_id
4700 4704 assoc.notification = notification
4701 4705
4702 4706 # if created_by is inside recipients mark his notification
4703 4707 # as read
4704 4708 if u.user_id == created_by.user_id:
4705 4709 assoc.read = True
4706 4710 Session().add(assoc)
4707 4711
4708 4712 Session().add(notification)
4709 4713
4710 4714 return notification
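    # Usage sketch (names are illustrative assumptions, not fixtures from this code):
    #   Notification.create(created_by=admin_user, subject=u'subject', body=u'body',
    #                       recipients=[user_a, user_b],
    #                       type_=Notification.TYPE_MESSAGE)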
4711 4715
4712 4716
4713 4717 class UserNotification(Base, BaseModel):
4714 4718 __tablename__ = 'user_to_notification'
4715 4719 __table_args__ = (
4716 4720 UniqueConstraint('user_id', 'notification_id'),
4717 4721 base_table_args
4718 4722 )
4719 4723
4720 4724 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4721 4725 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4722 4726 read = Column('read', Boolean, default=False)
4723 4727 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4724 4728
4725 4729 user = relationship('User', lazy="joined")
4726 4730 notification = relationship('Notification', lazy="joined",
4727 4731 order_by=lambda: Notification.created_on.desc(),)
4728 4732
4729 4733 def mark_as_read(self):
4730 4734 self.read = True
4731 4735 Session().add(self)
4732 4736
4733 4737
4734 4738 class UserNotice(Base, BaseModel):
4735 4739 __tablename__ = 'user_notices'
4736 4740 __table_args__ = (
4737 4741 base_table_args
4738 4742 )
4739 4743
4740 4744 NOTIFICATION_TYPE_MESSAGE = 'message'
4741 4745 NOTIFICATION_TYPE_NOTICE = 'notice'
4742 4746
4743 4747 NOTIFICATION_LEVEL_INFO = 'info'
4744 4748 NOTIFICATION_LEVEL_WARNING = 'warning'
4745 4749 NOTIFICATION_LEVEL_ERROR = 'error'
4746 4750
4747 4751 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4748 4752
4749 4753 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4750 4754 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4751 4755
4752 4756 notice_read = Column('notice_read', Boolean, default=False)
4753 4757
4754 4758 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4755 4759 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4756 4760
4757 4761 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4758 4762 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4759 4763
4760 4764 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4761 4765 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4762 4766
4763 4767 @classmethod
4764 4768 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4765 4769
4766 4770 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4767 4771 cls.NOTIFICATION_LEVEL_WARNING,
4768 4772 cls.NOTIFICATION_LEVEL_INFO]:
4769 4773 return
4770 4774
4771 4775 from rhodecode.model.user import UserModel
4772 4776 user = UserModel().get_user(user)
4773 4777
4774 4778 new_notice = UserNotice()
4775 4779 if not allow_duplicate:
4776 4780 existing_msg = UserNotice().query() \
4777 4781 .filter(UserNotice.user == user) \
4778 4782 .filter(UserNotice.notice_body == body) \
4779 4783 .filter(UserNotice.notice_read == false()) \
4780 4784 .scalar()
4781 4785 if existing_msg:
4782 4786 log.warning('Ignoring duplicate notice for user %s', user)
4783 4787 return
4784 4788
4785 4789 new_notice.user = user
4786 4790 new_notice.notice_subject = subject
4787 4791 new_notice.notice_body = body
4788 4792 new_notice.notification_level = notice_level
4789 4793 Session().add(new_notice)
4790 4794 Session().commit()
4791 4795
4792 4796
4793 4797 class Gist(Base, BaseModel):
4794 4798 __tablename__ = 'gists'
4795 4799 __table_args__ = (
4796 4800 Index('g_gist_access_id_idx', 'gist_access_id'),
4797 4801 Index('g_created_on_idx', 'created_on'),
4798 4802 base_table_args
4799 4803 )
4800 4804
4801 4805 GIST_PUBLIC = u'public'
4802 4806 GIST_PRIVATE = u'private'
4803 4807 DEFAULT_FILENAME = u'gistfile1.txt'
4804 4808
4805 4809 ACL_LEVEL_PUBLIC = u'acl_public'
4806 4810 ACL_LEVEL_PRIVATE = u'acl_private'
4807 4811
4808 4812 gist_id = Column('gist_id', Integer(), primary_key=True)
4809 4813 gist_access_id = Column('gist_access_id', Unicode(250))
4810 4814 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4811 4815 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4812 4816 gist_expires = Column('gist_expires', Float(53), nullable=False)
4813 4817 gist_type = Column('gist_type', Unicode(128), nullable=False)
4814 4818 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4815 4819 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4816 4820 acl_level = Column('acl_level', Unicode(128), nullable=True)
4817 4821
4818 4822 owner = relationship('User')
4819 4823
4820 4824 def __repr__(self):
4821 4825 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4822 4826
4823 4827 @hybrid_property
4824 4828 def description_safe(self):
4825 4829 from rhodecode.lib import helpers as h
4826 4830 return h.escape(self.gist_description)
4827 4831
4828 4832 @classmethod
4829 4833 def get_or_404(cls, id_):
4830 4834 from pyramid.httpexceptions import HTTPNotFound
4831 4835
4832 4836 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4833 4837 if not res:
4834 4838 log.debug('WARN: No DB entry with id %s', id_)
4835 4839 raise HTTPNotFound()
4836 4840 return res
4837 4841
4838 4842 @classmethod
4839 4843 def get_by_access_id(cls, gist_access_id):
4840 4844 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4841 4845
4842 4846 def gist_url(self):
4843 4847 from rhodecode.model.gist import GistModel
4844 4848 return GistModel().get_url(self)
4845 4849
4846 4850 @classmethod
4847 4851 def base_path(cls):
4848 4852 """
4849 4853 Returns the base path where all gists are stored
4850 4854
4851 4855 :param cls:
4852 4856 """
4853 4857 from rhodecode.model.gist import GIST_STORE_LOC
4854 4858 q = Session().query(RhodeCodeUi)\
4855 4859 .filter(RhodeCodeUi.ui_key == URL_SEP)
4856 4860 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4857 4861 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4858 4862
4859 4863 def get_api_data(self):
4860 4864 """
4861 4865 Common function for generating gist related data for API
4862 4866 """
4863 4867 gist = self
4864 4868 data = {
4865 4869 'gist_id': gist.gist_id,
4866 4870 'type': gist.gist_type,
4867 4871 'access_id': gist.gist_access_id,
4868 4872 'description': gist.gist_description,
4869 4873 'url': gist.gist_url(),
4870 4874 'expires': gist.gist_expires,
4871 4875 'created_on': gist.created_on,
4872 4876 'modified_at': gist.modified_at,
4873 4877 'content': None,
4874 4878 'acl_level': gist.acl_level,
4875 4879 }
4876 4880 return data
4877 4881
4878 4882 def __json__(self):
4879 4883 data = dict(
4880 4884 )
4881 4885 data.update(self.get_api_data())
4882 4886 return data
4883 4887 # SCM functions
4884 4888
4885 4889 def scm_instance(self, **kwargs):
4886 4890 """
4887 4891 Get an instance of VCS Repository
4888 4892
4889 4893 :param kwargs:
4890 4894 """
4891 4895 from rhodecode.model.gist import GistModel
4892 4896 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4893 4897 return get_vcs_instance(
4894 4898 repo_path=safe_str(full_repo_path), create=False,
4895 4899 _vcs_alias=GistModel.vcs_backend)
4896 4900
4897 4901
4898 4902 class ExternalIdentity(Base, BaseModel):
4899 4903 __tablename__ = 'external_identities'
4900 4904 __table_args__ = (
4901 4905 Index('local_user_id_idx', 'local_user_id'),
4902 4906 Index('external_id_idx', 'external_id'),
4903 4907 base_table_args
4904 4908 )
4905 4909
4906 4910 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4907 4911 external_username = Column('external_username', Unicode(1024), default=u'')
4908 4912 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4909 4913 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4910 4914 access_token = Column('access_token', String(1024), default=u'')
4911 4915 alt_token = Column('alt_token', String(1024), default=u'')
4912 4916 token_secret = Column('token_secret', String(1024), default=u'')
4913 4917
4914 4918 @classmethod
4915 4919 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4916 4920 """
4917 4921 Returns ExternalIdentity instance based on search params
4918 4922
4919 4923 :param external_id:
4920 4924 :param provider_name:
4921 4925 :return: ExternalIdentity
4922 4926 """
4923 4927 query = cls.query()
4924 4928 query = query.filter(cls.external_id == external_id)
4925 4929 query = query.filter(cls.provider_name == provider_name)
4926 4930 if local_user_id:
4927 4931 query = query.filter(cls.local_user_id == local_user_id)
4928 4932 return query.first()
4929 4933
4930 4934 @classmethod
4931 4935 def user_by_external_id_and_provider(cls, external_id, provider_name):
4932 4936 """
4933 4937 Returns User instance based on search params
4934 4938
4935 4939 :param external_id:
4936 4940 :param provider_name:
4937 4941 :return: User
4938 4942 """
4939 4943 query = User.query()
4940 4944 query = query.filter(cls.external_id == external_id)
4941 4945 query = query.filter(cls.provider_name == provider_name)
4942 4946 query = query.filter(User.user_id == cls.local_user_id)
4943 4947 return query.first()
4944 4948
4945 4949 @classmethod
4946 4950 def by_local_user_id(cls, local_user_id):
4947 4951 """
4948 4952 Returns all tokens for user
4949 4953
4950 4954 :param local_user_id:
4951 4955 :return: ExternalIdentity
4952 4956 """
4953 4957 query = cls.query()
4954 4958 query = query.filter(cls.local_user_id == local_user_id)
4955 4959 return query
4956 4960
4957 4961 @classmethod
4958 4962 def load_provider_plugin(cls, plugin_id):
4959 4963 from rhodecode.authentication.base import loadplugin
4960 4964 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4961 4965 auth_plugin = loadplugin(_plugin_id)
4962 4966 return auth_plugin
4963 4967
4964 4968
4965 4969 class Integration(Base, BaseModel):
4966 4970 __tablename__ = 'integrations'
4967 4971 __table_args__ = (
4968 4972 base_table_args
4969 4973 )
4970 4974
4971 4975 integration_id = Column('integration_id', Integer(), primary_key=True)
4972 4976 integration_type = Column('integration_type', String(255))
4973 4977 enabled = Column('enabled', Boolean(), nullable=False)
4974 4978 name = Column('name', String(255), nullable=False)
4975 4979 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4976 4980 default=False)
4977 4981
4978 4982 settings = Column(
4979 4983 'settings_json', MutationObj.as_mutable(
4980 4984 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4981 4985 repo_id = Column(
4982 4986 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4983 4987 nullable=True, unique=None, default=None)
4984 4988 repo = relationship('Repository', lazy='joined')
4985 4989
4986 4990 repo_group_id = Column(
4987 4991 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4988 4992 nullable=True, unique=None, default=None)
4989 4993 repo_group = relationship('RepoGroup', lazy='joined')
4990 4994
4991 4995 @property
4992 4996 def scope(self):
4993 4997 if self.repo:
4994 4998 return repr(self.repo)
4995 4999 if self.repo_group:
4996 5000 if self.child_repos_only:
4997 5001 return repr(self.repo_group) + ' (child repos only)'
4998 5002 else:
4999 5003 return repr(self.repo_group) + ' (recursive)'
5000 5004 if self.child_repos_only:
5001 5005 return 'root_repos'
5002 5006 return 'global'
5003 5007
5004 5008 def __repr__(self):
5005 5009 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5006 5010
5007 5011
5008 5012 class RepoReviewRuleUser(Base, BaseModel):
5009 5013 __tablename__ = 'repo_review_rules_users'
5010 5014 __table_args__ = (
5011 5015 base_table_args
5012 5016 )
5013 5017 ROLE_REVIEWER = u'reviewer'
5014 5018 ROLE_OBSERVER = u'observer'
5015 5019 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5016 5020
5017 5021 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5018 5022 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5019 5023 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5020 5024 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5021 5025 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5022 5026 user = relationship('User')
5023 5027
5024 5028 def rule_data(self):
5025 5029 return {
5026 5030 'mandatory': self.mandatory,
5027 5031 'role': self.role,
5028 5032 }
5029 5033
5030 5034
5031 5035 class RepoReviewRuleUserGroup(Base, BaseModel):
5032 5036 __tablename__ = 'repo_review_rules_users_groups'
5033 5037 __table_args__ = (
5034 5038 base_table_args
5035 5039 )
5036 5040
5037 5041 VOTE_RULE_ALL = -1
5038 5042 ROLE_REVIEWER = u'reviewer'
5039 5043 ROLE_OBSERVER = u'observer'
5040 5044 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5041 5045
5042 5046 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5043 5047 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5044 5048 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5045 5049 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5046 5050 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5047 5051 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5048 5052 users_group = relationship('UserGroup')
5049 5053
5050 5054 def rule_data(self):
5051 5055 return {
5052 5056 'mandatory': self.mandatory,
5053 5057 'role': self.role,
5054 5058 'vote_rule': self.vote_rule
5055 5059 }
5056 5060
5057 5061 @property
5058 5062 def vote_rule_label(self):
5059 5063 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5060 5064 return 'all must vote'
5061 5065 else:
5062 5066 return 'min. vote {}'.format(self.vote_rule)
5063 5067
5064 5068
5065 5069 class RepoReviewRule(Base, BaseModel):
5066 5070 __tablename__ = 'repo_review_rules'
5067 5071 __table_args__ = (
5068 5072 base_table_args
5069 5073 )
5070 5074
5071 5075 repo_review_rule_id = Column(
5072 5076 'repo_review_rule_id', Integer(), primary_key=True)
5073 5077 repo_id = Column(
5074 5078 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5075 5079 repo = relationship('Repository', backref='review_rules')
5076 5080
5077 5081 review_rule_name = Column('review_rule_name', String(255))
5078 5082 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5079 5083 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5080 5084 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5081 5085
5082 5086 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5083 5087
5084 5088 # Legacy fields, just for backward compat
5085 5089 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5086 5090 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5087 5091
5088 5092 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5089 5093 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5090 5094
5091 5095 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5092 5096
5093 5097 rule_users = relationship('RepoReviewRuleUser')
5094 5098 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5095 5099
5096 5100 def _validate_pattern(self, value):
5097 5101 re.compile('^' + glob2re(value) + '$')
5098 5102
5099 5103 @hybrid_property
5100 5104 def source_branch_pattern(self):
5101 5105 return self._branch_pattern or '*'
5102 5106
5103 5107 @source_branch_pattern.setter
5104 5108 def source_branch_pattern(self, value):
5105 5109 self._validate_pattern(value)
5106 5110 self._branch_pattern = value or '*'
5107 5111
5108 5112 @hybrid_property
5109 5113 def target_branch_pattern(self):
5110 5114 return self._target_branch_pattern or '*'
5111 5115
5112 5116 @target_branch_pattern.setter
5113 5117 def target_branch_pattern(self, value):
5114 5118 self._validate_pattern(value)
5115 5119 self._target_branch_pattern = value or '*'
5116 5120
5117 5121 @hybrid_property
5118 5122 def file_pattern(self):
5119 5123 return self._file_pattern or '*'
5120 5124
5121 5125 @file_pattern.setter
5122 5126 def file_pattern(self, value):
5123 5127 self._validate_pattern(value)
5124 5128 self._file_pattern = value or '*'
5125 5129
5126 5130 @hybrid_property
5127 5131 def forbid_pr_author_to_review(self):
5128 5132 return self.pr_author == 'forbid_pr_author'
5129 5133
5130 5134 @hybrid_property
5131 5135 def include_pr_author_to_review(self):
5132 5136 return self.pr_author == 'include_pr_author'
5133 5137
5134 5138 @hybrid_property
5135 5139 def forbid_commit_author_to_review(self):
5136 5140 return self.commit_author == 'forbid_commit_author'
5137 5141
5138 5142 @hybrid_property
5139 5143 def include_commit_author_to_review(self):
5140 5144 return self.commit_author == 'include_commit_author'
5141 5145
5142 5146 def matches(self, source_branch, target_branch, files_changed):
5143 5147 """
5144 5148 Check if this review rule matches a branch/files in a pull request
5145 5149
5146 5150 :param source_branch: source branch name for the commit
5147 5151 :param target_branch: target branch name for the commit
5148 5152 :param files_changed: list of file paths changed in the pull request
5149 5153 """
5150 5154
5151 5155 source_branch = source_branch or ''
5152 5156 target_branch = target_branch or ''
5153 5157 files_changed = files_changed or []
5154 5158
5155 5159 branch_matches = True
5156 5160 if source_branch or target_branch:
5157 5161 if self.source_branch_pattern == '*':
5158 5162 source_branch_match = True
5159 5163 else:
5160 5164 if self.source_branch_pattern.startswith('re:'):
5161 5165 source_pattern = self.source_branch_pattern[3:]
5162 5166 else:
5163 5167 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5164 5168 source_branch_regex = re.compile(source_pattern)
5165 5169 source_branch_match = bool(source_branch_regex.search(source_branch))
5166 5170 if self.target_branch_pattern == '*':
5167 5171 target_branch_match = True
5168 5172 else:
5169 5173 if self.target_branch_pattern.startswith('re:'):
5170 5174 target_pattern = self.target_branch_pattern[3:]
5171 5175 else:
5172 5176 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5173 5177 target_branch_regex = re.compile(target_pattern)
5174 5178 target_branch_match = bool(target_branch_regex.search(target_branch))
5175 5179
5176 5180 branch_matches = source_branch_match and target_branch_match
5177 5181
5178 5182 files_matches = True
5179 5183 if self.file_pattern != '*':
5180 5184 files_matches = False
5181 5185 if self.file_pattern.startswith('re:'):
5182 5186 file_pattern = self.file_pattern[3:]
5183 5187 else:
5184 5188 file_pattern = glob2re(self.file_pattern)
5185 5189 file_regex = re.compile(file_pattern)
5186 5190 for file_data in files_changed:
5187 5191 filename = file_data.get('filename')
5188 5192
5189 5193 if file_regex.search(filename):
5190 5194 files_matches = True
5191 5195 break
5192 5196
5193 5197 return branch_matches and files_matches
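    # Illustrative sketch (example values only): a rule with
    # source_branch_pattern='dev*', target_branch_pattern='*' and
    # file_pattern='re:.*\.py$' matches a pull request from branch 'develop'
    # that touches 'setup.py'; a leading 're:' switches a pattern from glob
    # to raw regex, as handled above.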
5194 5198
5195 5199 @property
5196 5200 def review_users(self):
5197 5201 """ Returns the users which this rule applies to """
5198 5202
5199 5203 users = collections.OrderedDict()
5200 5204
5201 5205 for rule_user in self.rule_users:
5202 5206 if rule_user.user.active:
5203 5207 if rule_user.user not in users:
5204 5208 users[rule_user.user.username] = {
5205 5209 'user': rule_user.user,
5206 5210 'source': 'user',
5207 5211 'source_data': {},
5208 5212 'data': rule_user.rule_data()
5209 5213 }
5210 5214
5211 5215 for rule_user_group in self.rule_user_groups:
5212 5216 source_data = {
5213 5217 'user_group_id': rule_user_group.users_group.users_group_id,
5214 5218 'name': rule_user_group.users_group.users_group_name,
5215 5219 'members': len(rule_user_group.users_group.members)
5216 5220 }
5217 5221 for member in rule_user_group.users_group.members:
5218 5222 if member.user.active:
5219 5223 key = member.user.username
5220 5224 if key in users:
5221 5225 # skip this member as we already have them;
5222 5226 # this prevents overriding the "first" matched
5223 5227 # users with duplicates from multiple groups
5224 5228 continue
5225 5229
5226 5230 users[key] = {
5227 5231 'user': member.user,
5228 5232 'source': 'user_group',
5229 5233 'source_data': source_data,
5230 5234 'data': rule_user_group.rule_data()
5231 5235 }
5232 5236
5233 5237 return users
5234 5238
5235 5239 def user_group_vote_rule(self, user_id):
5236 5240
5237 5241 rules = []
5238 5242 if not self.rule_user_groups:
5239 5243 return rules
5240 5244
5241 5245 for user_group in self.rule_user_groups:
5242 5246 user_group_members = [x.user_id for x in user_group.users_group.members]
5243 5247 if user_id in user_group_members:
5244 5248 rules.append(user_group)
5245 5249 return rules
5246 5250
5247 5251 def __repr__(self):
5248 5252 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
5249 5253 self.repo_review_rule_id, self.repo)
5250 5254
5251 5255
5252 5256 class ScheduleEntry(Base, BaseModel):
5253 5257 __tablename__ = 'schedule_entries'
5254 5258 __table_args__ = (
5255 5259 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5256 5260 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5257 5261 base_table_args,
5258 5262 )
5259 5263
5260 5264 schedule_types = ['crontab', 'timedelta', 'integer']
5261 5265 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5262 5266
5263 5267 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5264 5268 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5265 5269 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5266 5270
5267 5271 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5268 5272 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5269 5273
5270 5274 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5271 5275 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5272 5276
5273 5277 # task
5274 5278 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5275 5279 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5276 5280 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5277 5281 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5278 5282
5279 5283 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5280 5284 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5281 5285
5282 5286 @hybrid_property
5283 5287 def schedule_type(self):
5284 5288 return self._schedule_type
5285 5289
5286 5290 @schedule_type.setter
5287 5291 def schedule_type(self, val):
5288 5292 if val not in self.schedule_types:
5289 5293 raise ValueError('Value must be one of `{}`, got `{}`'.format(
5290 5294 self.schedule_types, val))
5291 5295
5292 5296 self._schedule_type = val
5293 5297
5294 5298 @classmethod
5295 5299 def get_uid(cls, obj):
5296 5300 args = obj.task_args
5297 5301 kwargs = obj.task_kwargs
5298 5302 if isinstance(args, JsonRaw):
5299 5303 try:
5300 5304 args = json.loads(args)
5301 5305 except ValueError:
5302 5306 args = tuple()
5303 5307
5304 5308 if isinstance(kwargs, JsonRaw):
5305 5309 try:
5306 5310 kwargs = json.loads(kwargs)
5307 5311 except ValueError:
5308 5312 kwargs = dict()
5309 5313
5310 5314 dot_notation = obj.task_dot_notation
5311 5315 val = '.'.join(map(safe_str, [
5312 5316 sorted(dot_notation), args, sorted(kwargs.items())]))
5313 5317 return hashlib.sha1(val).hexdigest()
5314 5318
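get_uid above derives a stable identifier from the task's dotted path plus its JSON-decoded args/kwargs, so the same logical schedule always maps to the same uid. A simplified standalone sketch of that idea (the task path below is made up, and the exact digest differs from the production code, which builds the hashed string with safe_str):

import hashlib
import json

def task_uid(dot_notation, args=None, kwargs=None):
    # normalize the arguments so equivalent schedules hash identically
    payload = json.dumps({
        'task': dot_notation,
        'args': list(args or []),
        'kwargs': sorted((kwargs or {}).items()),
    }, sort_keys=True)
    return hashlib.sha1(payload.encode('utf8')).hexdigest()

uid_a = task_uid('myapp.tasks.send_email', ['admin'], {'retries': 3})
uid_b = task_uid('myapp.tasks.send_email', ['admin'], {'retries': 3})
assert uid_a == uid_b
assert uid_a != task_uid('myapp.tasks.send_email', ['admin'], {'retries': 5})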
5315 5319 @classmethod
5316 5320 def get_by_schedule_name(cls, schedule_name):
5317 5321 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5318 5322
5319 5323 @classmethod
5320 5324 def get_by_schedule_id(cls, schedule_id):
5321 5325 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5322 5326
5323 5327 @property
5324 5328 def task(self):
5325 5329 return self.task_dot_notation
5326 5330
5327 5331 @property
5328 5332 def schedule(self):
5329 5333 from rhodecode.lib.celerylib.utils import raw_2_schedule
5330 5334 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5331 5335 return schedule
5332 5336
5333 5337 @property
5334 5338 def args(self):
5335 5339 try:
5336 5340 return list(self.task_args or [])
5337 5341 except ValueError:
5338 5342 return list()
5339 5343
5340 5344 @property
5341 5345 def kwargs(self):
5342 5346 try:
5343 5347 return dict(self.task_kwargs or {})
5344 5348 except ValueError:
5345 5349 return dict()
5346 5350
5347 5351 def _as_raw(self, val, indent=None):
5348 5352 if hasattr(val, 'de_coerce'):
5349 5353 val = val.de_coerce()
5350 5354 if val:
5351 5355 val = json.dumps(val, indent=indent, sort_keys=True)
5352 5356
5353 5357 return val
5354 5358
5355 5359 @property
5356 5360 def schedule_definition_raw(self):
5357 5361 return self._as_raw(self.schedule_definition)
5358 5362
5359 5363 def args_raw(self, indent=None):
5360 5364 return self._as_raw(self.task_args, indent)
5361 5365
5362 5366 def kwargs_raw(self, indent=None):
5363 5367 return self._as_raw(self.task_kwargs, indent)
5364 5368
5365 5369 def __repr__(self):
5366 5370 return '<DB:ScheduleEntry({}:{})>'.format(
5367 5371 self.schedule_entry_id, self.schedule_name)
5368 5372
5369 5373
5370 5374 @event.listens_for(ScheduleEntry, 'before_update')
5371 5375 def update_task_uid(mapper, connection, target):
5372 5376 target.task_uid = ScheduleEntry.get_uid(target)
5373 5377
5374 5378
5375 5379 @event.listens_for(ScheduleEntry, 'before_insert')
5376 5380 def set_task_uid(mapper, connection, target):
5377 5381 target.task_uid = ScheduleEntry.get_uid(target)
5378 5382
5379 5383
5380 5384 class _BaseBranchPerms(BaseModel):
5381 5385 @classmethod
5382 5386 def compute_hash(cls, value):
5383 5387 return sha1_safe(value)
5384 5388
5385 5389 @hybrid_property
5386 5390 def branch_pattern(self):
5387 5391 return self._branch_pattern or '*'
5388 5392
5389 5393 @hybrid_property
5390 5394 def branch_hash(self):
5391 5395 return self._branch_hash
5392 5396
5393 5397 def _validate_glob(self, value):
5394 5398 re.compile('^' + glob2re(value) + '$')
5395 5399
5396 5400 @branch_pattern.setter
5397 5401 def branch_pattern(self, value):
5398 5402 self._validate_glob(value)
5399 5403 self._branch_pattern = value or '*'
5400 5404 # set the Hash when setting the branch pattern
5401 5405 self._branch_hash = self.compute_hash(self._branch_pattern)
5402 5406
5403 5407 def matches(self, branch):
5404 5408 """
5405 5409 Check if the given branch matches this entry
5406 5410
5407 5411 :param branch: branch name for the commit
5408 5412 """
5409 5413
5410 5414 branch = branch or ''
5411 5415
5412 5416 branch_matches = True
5413 5417 if branch:
5414 5418 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5415 5419 branch_matches = bool(branch_regex.search(branch))
5416 5420
5417 5421 return branch_matches
5418 5422
5419 5423
5420 5424 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5421 5425 __tablename__ = 'user_to_repo_branch_permissions'
5422 5426 __table_args__ = (
5423 5427 base_table_args
5424 5428 )
5425 5429
5426 5430 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5427 5431
5428 5432 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5429 5433 repo = relationship('Repository', backref='user_branch_perms')
5430 5434
5431 5435 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5432 5436 permission = relationship('Permission')
5433 5437
5434 5438 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5435 5439 user_repo_to_perm = relationship('UserRepoToPerm')
5436 5440
5437 5441 rule_order = Column('rule_order', Integer(), nullable=False)
5438 5442 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5439 5443 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5440 5444
5441 5445 def __unicode__(self):
5442 5446 return u'<UserBranchPermission(%s => %r)>' % (
5443 5447 self.user_repo_to_perm, self.branch_pattern)
5444 5448
5445 5449
5446 5450 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5447 5451 __tablename__ = 'user_group_to_repo_branch_permissions'
5448 5452 __table_args__ = (
5449 5453 base_table_args
5450 5454 )
5451 5455
5452 5456 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5453 5457
5454 5458 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5455 5459 repo = relationship('Repository', backref='user_group_branch_perms')
5456 5460
5457 5461 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5458 5462 permission = relationship('Permission')
5459 5463
5460 5464 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5461 5465 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5462 5466
5463 5467 rule_order = Column('rule_order', Integer(), nullable=False)
5464 5468 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5465 5469 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5466 5470
5467 5471 def __unicode__(self):
5468 5472 return u'<UserGroupBranchPermission(%s => %r)>' % (
5469 5473 self.user_group_repo_to_perm, self.branch_pattern)
5470 5474
5471 5475
5472 5476 class UserBookmark(Base, BaseModel):
5473 5477 __tablename__ = 'user_bookmarks'
5474 5478 __table_args__ = (
5475 5479 UniqueConstraint('user_id', 'bookmark_repo_id'),
5476 5480 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5477 5481 UniqueConstraint('user_id', 'bookmark_position'),
5478 5482 base_table_args
5479 5483 )
5480 5484
5481 5485 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5482 5486 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5483 5487 position = Column("bookmark_position", Integer(), nullable=False)
5484 5488 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5485 5489 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5486 5490 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5487 5491
5488 5492 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5489 5493 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5490 5494
5491 5495 user = relationship("User")
5492 5496
5493 5497 repository = relationship("Repository")
5494 5498 repository_group = relationship("RepoGroup")
5495 5499
5496 5500 @classmethod
5497 5501 def get_by_position_for_user(cls, position, user_id):
5498 5502 return cls.query() \
5499 5503 .filter(UserBookmark.user_id == user_id) \
5500 5504 .filter(UserBookmark.position == position).scalar()
5501 5505
5502 5506 @classmethod
5503 5507 def get_bookmarks_for_user(cls, user_id, cache=True):
5504 5508 bookmarks = cls.query() \
5505 5509 .filter(UserBookmark.user_id == user_id) \
5506 5510 .options(joinedload(UserBookmark.repository)) \
5507 5511 .options(joinedload(UserBookmark.repository_group)) \
5508 5512 .order_by(UserBookmark.position.asc())
5509 5513
5510 5514 if cache:
5511 5515 bookmarks = bookmarks.options(
5512 5516 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5513 5517 )
5514 5518
5515 5519 return bookmarks.all()
5516 5520
5517 5521 def __unicode__(self):
5518 5522 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5519 5523
5520 5524
5521 5525 class FileStore(Base, BaseModel):
5522 5526 __tablename__ = 'file_store'
5523 5527 __table_args__ = (
5524 5528 base_table_args
5525 5529 )
5526 5530
5527 5531 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5528 5532 file_uid = Column('file_uid', String(1024), nullable=False)
5529 5533 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5530 5534 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5531 5535 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5532 5536
5533 5537 # sha256 hash
5534 5538 file_hash = Column('file_hash', String(512), nullable=False)
5535 5539 file_size = Column('file_size', BigInteger(), nullable=False)
5536 5540
5537 5541 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5538 5542 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5539 5543 accessed_count = Column('accessed_count', Integer(), default=0)
5540 5544
5541 5545 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5542 5546
5543 5547 # if repo/repo_group reference is set, check for permissions
5544 5548 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5545 5549
5547 5551 # hidden marks an attachment that should not be shown in the artifact listing
5547 5551 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5548 5552
5549 5553 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5550 5554 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5551 5555
5552 5556 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5553 5557
5554 5558 # scope limited to a user which the requester has access to
5555 5559 scope_user_id = Column(
5556 5560 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5557 5561 nullable=True, unique=None, default=None)
5558 5562 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5559 5563
5560 5564 # scope limited to a user group which the requester has access to
5561 5565 scope_user_group_id = Column(
5562 5566 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5563 5567 nullable=True, unique=None, default=None)
5564 5568 user_group = relationship('UserGroup', lazy='joined')
5565 5569
5566 5570 # scope limited to a repo which the requester has access to
5567 5571 scope_repo_id = Column(
5568 5572 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5569 5573 nullable=True, unique=None, default=None)
5570 5574 repo = relationship('Repository', lazy='joined')
5571 5575
5572 5576 # scope limited to a repo group which the requester has access to
5573 5577 scope_repo_group_id = Column(
5574 5578 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5575 5579 nullable=True, unique=None, default=None)
5576 5580 repo_group = relationship('RepoGroup', lazy='joined')
5577 5581
5578 5582 @classmethod
5579 5583 def get_by_store_uid(cls, file_store_uid, safe=False):
5580 5584 if safe:
5581 5585 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5582 5586 else:
5583 5587 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5584 5588
5585 5589 @classmethod
5586 5590 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5587 5591 file_description='', enabled=True, hidden=False, check_acl=True,
5588 5592 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5589 5593
5590 5594 store_entry = FileStore()
5591 5595 store_entry.file_uid = file_uid
5592 5596 store_entry.file_display_name = file_display_name
5593 5597 store_entry.file_org_name = filename
5594 5598 store_entry.file_size = file_size
5595 5599 store_entry.file_hash = file_hash
5596 5600 store_entry.file_description = file_description
5597 5601
5598 5602 store_entry.check_acl = check_acl
5599 5603 store_entry.enabled = enabled
5600 5604 store_entry.hidden = hidden
5601 5605
5602 5606 store_entry.user_id = user_id
5603 5607 store_entry.scope_user_id = scope_user_id
5604 5608 store_entry.scope_repo_id = scope_repo_id
5605 5609 store_entry.scope_repo_group_id = scope_repo_group_id
5606 5610
5607 5611 return store_entry
5608 5612
5609 5613 @classmethod
5610 5614 def store_metadata(cls, file_store_id, args, commit=True):
5611 5615 file_store = FileStore.get(file_store_id)
5612 5616 if file_store is None:
5613 5617 return
5614 5618
5615 5619 for section, key, value, value_type in args:
5616 5620 has_key = FileStoreMetadata().query() \
5617 5621 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5618 5622 .filter(FileStoreMetadata.file_store_meta_section == section) \
5619 5623 .filter(FileStoreMetadata.file_store_meta_key == key) \
5620 5624 .scalar()
5621 5625 if has_key:
5622 5626 msg = 'key `{}` already defined under section `{}` for this file.'\
5623 5627 .format(key, section)
5624 5628 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5625 5629
5626 5630 # NOTE(marcink): raises ArtifactMetadataBadValueType
5627 5631 FileStoreMetadata.valid_value_type(value_type)
5628 5632
5629 5633 meta_entry = FileStoreMetadata()
5630 5634 meta_entry.file_store = file_store
5631 5635 meta_entry.file_store_meta_section = section
5632 5636 meta_entry.file_store_meta_key = key
5633 5637 meta_entry.file_store_meta_value_type = value_type
5634 5638 meta_entry.file_store_meta_value = value
5635 5639
5636 5640 Session().add(meta_entry)
5637 5641
5638 5642 try:
5639 5643 if commit:
5640 5644 Session().commit()
5641 5645 except IntegrityError:
5642 5646 Session().rollback()
5643 5647 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5644 5648
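store_metadata expects an iterable of (section, key, value, value_type) tuples and rejects a section/key pair that already exists for the file. A standalone sketch of that duplicate check, with a plain set standing in for the database lookup:

def store_metadata(existing_keys, args):
    # existing_keys: set of (section, key) pairs already stored for the file
    stored = []
    for section, key, value, value_type in args:
        if (section, key) in existing_keys:
            raise ValueError(
                'key `{}` already defined under section `{}`'.format(key, section))
        existing_keys.add((section, key))
        stored.append({'section': section, 'key': key,
                       'value': value, 'type': value_type})
    return stored

rows = store_metadata(set(), [('scm', 'commit_id', 'abcd1234', 'str'),
                              ('scm', 'branch', 'default', 'str')])
assert [r['key'] for r in rows] == ['commit_id', 'branch']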
5645 5649 @classmethod
5646 5650 def bump_access_counter(cls, file_uid, commit=True):
5647 5651 FileStore().query()\
5648 5652 .filter(FileStore.file_uid == file_uid)\
5649 5653 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5650 5654 FileStore.accessed_on: datetime.datetime.now()})
5651 5655 if commit:
5652 5656 Session().commit()
5653 5657
5654 5658 def __json__(self):
5655 5659 data = {
5656 5660 'filename': self.file_display_name,
5657 5661 'filename_org': self.file_org_name,
5658 5662 'file_uid': self.file_uid,
5659 5663 'description': self.file_description,
5660 5664 'hidden': self.hidden,
5661 5665 'size': self.file_size,
5662 5666 'created_on': self.created_on,
5663 5667 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5664 5668 'downloaded_times': self.accessed_count,
5665 5669 'sha256': self.file_hash,
5666 5670 'metadata': self.file_metadata,
5667 5671 }
5668 5672
5669 5673 return data
5670 5674
5671 5675 def __repr__(self):
5672 5676 return '<FileStore({})>'.format(self.file_store_id)
5673 5677
5674 5678
5675 5679 class FileStoreMetadata(Base, BaseModel):
5676 5680 __tablename__ = 'file_store_metadata'
5677 5681 __table_args__ = (
5678 5682 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5679 5683 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5680 5684 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5681 5685 base_table_args
5682 5686 )
5683 5687 SETTINGS_TYPES = {
5684 5688 'str': safe_str,
5685 5689 'int': safe_int,
5686 5690 'unicode': safe_unicode,
5687 5691 'bool': str2bool,
5688 5692 'list': functools.partial(aslist, sep=',')
5689 5693 }
5690 5694
5691 5695 file_store_meta_id = Column(
5692 5696 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5693 5697 primary_key=True)
5694 5698 _file_store_meta_section = Column(
5695 5699 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5696 5700 nullable=True, unique=None, default=None)
5697 5701 _file_store_meta_section_hash = Column(
5698 5702 "file_store_meta_section_hash", String(255),
5699 5703 nullable=True, unique=None, default=None)
5700 5704 _file_store_meta_key = Column(
5701 5705 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5702 5706 nullable=True, unique=None, default=None)
5703 5707 _file_store_meta_key_hash = Column(
5704 5708 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5705 5709 _file_store_meta_value = Column(
5706 5710 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5707 5711 nullable=True, unique=None, default=None)
5708 5712 _file_store_meta_value_type = Column(
5709 5713 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5710 5714 default='unicode')
5711 5715
5712 5716 file_store_id = Column(
5713 5717 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5714 5718 nullable=True, unique=None, default=None)
5715 5719
5716 5720 file_store = relationship('FileStore', lazy='joined')
5717 5721
5718 5722 @classmethod
5719 5723 def valid_value_type(cls, value):
5720 5724 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5721 5725 raise ArtifactMetadataBadValueType(
5722 5726 'value_type must be one of %s, got %s' % (cls.SETTINGS_TYPES.keys(), value))
5723 5727
5724 5728 @hybrid_property
5725 5729 def file_store_meta_section(self):
5726 5730 return self._file_store_meta_section
5727 5731
5728 5732 @file_store_meta_section.setter
5729 5733 def file_store_meta_section(self, value):
5730 5734 self._file_store_meta_section = value
5731 5735 self._file_store_meta_section_hash = _hash_key(value)
5732 5736
5733 5737 @hybrid_property
5734 5738 def file_store_meta_key(self):
5735 5739 return self._file_store_meta_key
5736 5740
5737 5741 @file_store_meta_key.setter
5738 5742 def file_store_meta_key(self, value):
5739 5743 self._file_store_meta_key = value
5740 5744 self._file_store_meta_key_hash = _hash_key(value)
5741 5745
5742 5746 @hybrid_property
5743 5747 def file_store_meta_value(self):
5744 5748 val = self._file_store_meta_value
5745 5749
5746 5750 if self._file_store_meta_value_type:
5747 5751 # e.g. unicode.encrypted == unicode
5748 5752 _type = self._file_store_meta_value_type.split('.')[0]
5749 5753 # decode the value if it is an encrypted field type
5750 5754 if '.encrypted' in self._file_store_meta_value_type:
5751 5755 cipher = EncryptedTextValue()
5752 5756 val = safe_unicode(cipher.process_result_value(val, None))
5753 5757 # do final type conversion
5754 5758 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5755 5759 val = converter(val)
5756 5760
5757 5761 return val
5758 5762
5759 5763 @file_store_meta_value.setter
5760 5764 def file_store_meta_value(self, val):
5761 5765 val = safe_unicode(val)
5762 5766 # encode the encrypted value
5763 5767 if '.encrypted' in self.file_store_meta_value_type:
5764 5768 cipher = EncryptedTextValue()
5765 5769 val = safe_unicode(cipher.process_bind_param(val, None))
5766 5770 self._file_store_meta_value = val
5767 5771
5768 5772 @hybrid_property
5769 5773 def file_store_meta_value_type(self):
5770 5774 return self._file_store_meta_value_type
5771 5775
5772 5776 @file_store_meta_value_type.setter
5773 5777 def file_store_meta_value_type(self, val):
5774 5778 # e.g. unicode.encrypted
5775 5779 self.valid_value_type(val)
5776 5780 self._file_store_meta_value_type = val
5777 5781
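The value-type convention above is a base type name with an optional '.encrypted' suffix: encrypted values are decrypted first, then coerced through SETTINGS_TYPES. A simplified standalone sketch of the coercion step, with the decryption left out:

SETTINGS_TYPES = {
    'str': str,
    'int': int,
    'bool': lambda v: v.lower() in ('true', '1', 'yes'),
    'list': lambda v: [x.strip() for x in v.split(',') if x.strip()],
}

def coerce_value(raw_value, value_type):
    base_type = value_type.split('.')[0]  # e.g. 'int.encrypted' -> 'int'
    converter = SETTINGS_TYPES.get(base_type, str)
    return converter(raw_value)

assert coerce_value('42', 'int') == 42
assert coerce_value('42', 'int.encrypted') == 42  # decryption omitted here
assert coerce_value('a, b', 'list') == ['a', 'b']
assert coerce_value('yes', 'bool') is True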
5778 5782 def __json__(self):
5779 5783 data = {
5780 5784 'artifact': self.file_store.file_uid,
5781 5785 'section': self.file_store_meta_section,
5782 5786 'key': self.file_store_meta_key,
5783 5787 'value': self.file_store_meta_value,
5784 5788 }
5785 5789
5786 5790 return data
5787 5791
5788 5792 def __repr__(self):
5789 5793 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5790 5794 self.file_store_meta_key, self.file_store_meta_value)
5791 5795
5792 5796
5793 5797 class DbMigrateVersion(Base, BaseModel):
5794 5798 __tablename__ = 'db_migrate_version'
5795 5799 __table_args__ = (
5796 5800 base_table_args,
5797 5801 )
5798 5802
5799 5803 repository_id = Column('repository_id', String(250), primary_key=True)
5800 5804 repository_path = Column('repository_path', Text)
5801 5805 version = Column('version', Integer)
5802 5806
5803 5807 @classmethod
5804 5808 def set_version(cls, version):
5805 5809 """
5806 5810 Helper for forcing a different version, usually for debugging purposes via ishell.
5807 5811 """
5808 5812 ver = DbMigrateVersion.query().first()
5809 5813 ver.version = version
5810 5814 Session().commit()
5811 5815
5812 5816
5813 5817 class DbSession(Base, BaseModel):
5814 5818 __tablename__ = 'db_session'
5815 5819 __table_args__ = (
5816 5820 base_table_args,
5817 5821 )
5818 5822
5819 5823 def __repr__(self):
5820 5824 return '<DB:DbSession({})>'.format(self.id)
5821 5825
5822 5826 id = Column('id', Integer())
5823 5827 namespace = Column('namespace', String(255), primary_key=True)
5824 5828 accessed = Column('accessed', DateTime, nullable=False)
5825 5829 created = Column('created', DateTime, nullable=False)
5826 5830 data = Column('data', PickleType, nullable=False)
@@ -1,2233 +1,2247 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull-request.
93 93 This is also used for the default reviewers logic.
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives a proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # first line is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 _compare_data = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message"]
161 161 )
162 162
163 163 for commit in _compare_data:
164 164 # NOTE(marcink): we serialize here, so the data returned from this function
165 165 # (which is later JSON-serialized) does not trigger more vcsserver calls
166 166 serialized_commit = dict(
167 167 author=commit.author,
168 168 date=commit.date,
169 169 message=commit.message,
170 170 commit_id=commit.raw_id,
171 171 raw_id=commit.raw_id
172 172 )
173 173 commits.append(serialized_commit)
174 174 user = User.get_from_cs_author(serialized_commit['author'])
175 175 if user and user not in commit_authors:
176 176 commit_authors.append(user)
177 177
178 178 # lines
179 179 if get_authors:
180 180 log.debug('Calculating authors of changed files')
181 181 target_commit = source_repo.get_commit(ancestor_id)
182 182
183 183 for fname, lines in changed_lines.items():
184 184
185 185 try:
186 186 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 187 except Exception:
188 188 log.exception("Failed to load node with path %s", fname)
189 189 continue
190 190
191 191 if not isinstance(node, FileNode):
192 192 continue
193 193
194 194 # NOTE(marcink): for binary node we don't do annotation, just use last author
195 195 if node.is_binary:
196 196 author = node.last_commit.author
197 197 email = node.last_commit.author_email
198 198
199 199 user = User.get_from_cs_author(author)
200 200 if user:
201 201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 202 author_counts[author] = author_counts.get(author, 0) + 1
203 203 email_counts[email] = email_counts.get(email, 0) + 1
204 204
205 205 continue
206 206
207 207 for annotation in node.annotate:
208 208 line_no, commit_id, get_commit_func, line_text = annotation
209 209 if line_no in lines:
210 210 if commit_id not in _commit_cache:
211 211 _commit_cache[commit_id] = get_commit_func()
212 212 commit = _commit_cache[commit_id]
213 213 author = commit.author
214 214 email = commit.author_email
215 215 user = User.get_from_cs_author(author)
216 216 if user:
217 217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 218 author_counts[author] = author_counts.get(author, 0) + 1
219 219 email_counts[email] = email_counts.get(email, 0) + 1
220 220
221 221 log.debug('Default reviewers processing finished')
222 222
223 223 return {
224 224 'commits': commits,
225 225 'files': all_files_changes,
226 226 'stats': stats,
227 227 'ancestor': ancestor_id,
228 228 # original authors of modified files
229 229 'original_authors': {
230 230 'users': user_counts,
231 231 'authors': author_counts,
232 232 'emails': email_counts,
233 233 },
234 234 'commit_authors': commit_authors
235 235 }
236 236
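get_diff_info above walks the parsed diff once, accumulating per-file stats, the total added/deleted counts, and the old line numbers of deleted or modified lines (these later drive the changed-file author lookup). A trimmed standalone sketch of that accumulation over an already-parsed structure (the dict layout below only approximates what diff_processor.prepare() returns):

def summarize_parsed_diff(parsed_files):
    all_files_changes, stats, changed_lines = [], [0, 0], {}
    for f in parsed_files:
        all_files_changes.append({'filename': f['filename'], 'stats': f['stats']})
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']
        changed_lines[f['filename']] = [
            line['old_lineno']
            for chunk in f['chunks'][1:]  # the first chunk holds context info
            for line in chunk['lines']
            if line['action'] in ('del', 'mod')]
    return all_files_changes, stats, changed_lines

files, stats, lines = summarize_parsed_diff([{
    'filename': 'setup.py',
    'stats': {'added': 1, 'deleted': 1},
    'chunks': [{}, {'lines': [{'action': 'del', 'old_lineno': 3},
                              {'action': 'add', 'old_lineno': None}]}],
}])
assert stats == [1, 1] and lines['setup.py'] == [3]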
237 237
238 238 class PullRequestModel(BaseModel):
239 239
240 240 cls = PullRequest
241 241
242 242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243 243
244 244 UPDATE_STATUS_MESSAGES = {
245 245 UpdateFailureReason.NONE: lazy_ugettext(
246 246 'Pull request update successful.'),
247 247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 248 'Pull request update failed because of an unknown error.'),
249 249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 250 'No update needed because the source and target have not changed.'),
251 251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 252 'Pull request cannot be updated because the reference type is '
253 253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 255 'This pull request cannot be updated because the target '
256 256 'reference is missing.'),
257 257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 258 'This pull request cannot be updated because the source '
259 259 'reference is missing.'),
260 260 }
261 261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'id': PullRequest.pull_request_id,
352 352 'title': PullRequest.title,
353 353 'updated_on_raw': PullRequest.updated_on,
354 354 'target_repo': PullRequest.target_repo_id
355 355 }
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
409 409 opened_by=None):
410 410 """
411 411 Count the number of pull requests for a specific repository that are
412 412 awaiting review.
413 413
414 414 :param repo_name: target or source repo
415 415 :param search_q: filter by text
416 416 :param source: boolean flag to specify if repo_name refers to source
417 417 :param statuses: list of pull request statuses
418 418 :param opened_by: author user of the pull request
419 419 :returns: int number of pull requests
420 420 """
421 421 pull_requests = self.get_awaiting_review(
422 422 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
423 423
424 424 return len(pull_requests)
425 425
426 426 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
427 427 opened_by=None, offset=0, length=None,
428 428 order_by=None, order_dir='desc'):
429 429 """
430 430 Get all pull requests for a specific repository that are awaiting
431 431 review.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param offset: pagination offset
439 439 :param length: length of returned list
440 440 :param order_by: order of the returned list
441 441 :param order_dir: 'asc' or 'desc' ordering direction
442 442 :returns: list of pull requests
443 443 """
444 444 pull_requests = self.get_all(
445 445 repo_name, search_q=search_q, source=source, statuses=statuses,
446 446 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
447 447
448 448 _filtered_pull_requests = []
449 449 for pr in pull_requests:
450 450 status = pr.calculated_review_status()
451 451 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
452 452 ChangesetStatus.STATUS_UNDER_REVIEW]:
453 453 _filtered_pull_requests.append(pr)
454 454 if length:
455 455 return _filtered_pull_requests[offset:offset+length]
456 456 else:
457 457 return _filtered_pull_requests
458 458
459 459 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
460 460 opened_by=None, user_id=None):
461 461 """
462 462 Count the number of pull requests for a specific repository that are
463 463 awaiting review from a specific user.
464 464
465 465 :param repo_name: target or source repo
466 466 :param search_q: filter by text
467 467 :param source: boolean flag to specify if repo_name refers to source
468 468 :param statuses: list of pull request statuses
469 469 :param opened_by: author user of the pull request
470 470 :param user_id: reviewer user of the pull request
471 471 :returns: int number of pull requests
472 472 """
473 473 pull_requests = self.get_awaiting_my_review(
474 474 repo_name, search_q=search_q, source=source, statuses=statuses,
475 475 opened_by=opened_by, user_id=user_id)
476 476
477 477 return len(pull_requests)
478 478
479 479 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
480 480 opened_by=None, user_id=None, offset=0,
481 481 length=None, order_by=None, order_dir='desc'):
482 482 """
483 483 Get all pull requests for a specific repository that are awaiting
484 484 review from a specific user.
485 485
486 486 :param repo_name: target or source repo
487 487 :param search_q: filter by text
488 488 :param source: boolean flag to specify if repo_name refers to source
489 489 :param statuses: list of pull request statuses
490 490 :param opened_by: author user of the pull request
491 491 :param user_id: reviewer user of the pull request
492 492 :param offset: pagination offset
493 493 :param length: length of returned list
494 494 :param order_by: order of the returned list
495 495 :param order_dir: 'asc' or 'desc' ordering direction
496 496 :returns: list of pull requests
497 497 """
498 498 pull_requests = self.get_all(
499 499 repo_name, search_q=search_q, source=source, statuses=statuses,
500 500 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
501 501
502 502 _my = PullRequestModel().get_not_reviewed(user_id)
503 503 my_participation = []
504 504 for pr in pull_requests:
505 505 if pr in _my:
506 506 my_participation.append(pr)
507 507 _filtered_pull_requests = my_participation
508 508 if length:
509 509 return _filtered_pull_requests[offset:offset+length]
510 510 else:
511 511 return _filtered_pull_requests
512 512
513 513 def get_not_reviewed(self, user_id):
514 514 return [
515 515 x.pull_request for x in PullRequestReviewers.query().filter(
516 516 PullRequestReviewers.user_id == user_id).all()
517 517 ]
518 518
519 519 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
520 520 order_by=None, order_dir='desc'):
521 521 q = PullRequest.query()
522 522 if user_id:
523 523 reviewers_subquery = Session().query(
524 524 PullRequestReviewers.pull_request_id).filter(
525 525 PullRequestReviewers.user_id == user_id).subquery()
526 526 user_filter = or_(
527 527 PullRequest.user_id == user_id,
528 528 PullRequest.pull_request_id.in_(reviewers_subquery)
529 529 )
530 530 q = PullRequest.query().filter(user_filter)
531 531
532 532 # closed,opened
533 533 if statuses:
534 534 q = q.filter(PullRequest.status.in_(statuses))
535 535
536 536 if query:
537 537 like_expression = u'%{}%'.format(safe_unicode(query))
538 538 q = q.join(User)
539 539 q = q.filter(or_(
540 540 cast(PullRequest.pull_request_id, String).ilike(like_expression),
541 541 User.username.ilike(like_expression),
542 542 PullRequest.title.ilike(like_expression),
543 543 PullRequest.description.ilike(like_expression),
544 544 ))
545 545 if order_by:
546 546 order_map = {
547 547 'name_raw': PullRequest.pull_request_id,
548 548 'title': PullRequest.title,
549 549 'updated_on_raw': PullRequest.updated_on,
550 550 'target_repo': PullRequest.target_repo_id
551 551 }
552 552 if order_dir == 'asc':
553 553 q = q.order_by(order_map[order_by].asc())
554 554 else:
555 555 q = q.order_by(order_map[order_by].desc())
556 556
557 557 return q
558 558
559 559 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
560 560 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
561 561 return q.count()
562 562
563 563 def get_im_participating_in(
564 564 self, user_id=None, statuses=None, query='', offset=0,
565 565 length=None, order_by=None, order_dir='desc'):
566 566 """
567 567 Get all pull requests that I'm participating in, or that I have opened
568 568 """
569 569
570 570 q = self._prepare_participating_query(
571 571 user_id, statuses=statuses, query=query, order_by=order_by,
572 572 order_dir=order_dir)
573 573
574 574 if length:
575 575 pull_requests = q.limit(length).offset(offset).all()
576 576 else:
577 577 pull_requests = q.all()
578 578
579 579 return pull_requests
580 580
581 581 def get_versions(self, pull_request):
582 582 """
583 583 Returns the versions of the pull request, sorted by version ID ascending
584 584 """
585 585 return PullRequestVersion.query()\
586 586 .filter(PullRequestVersion.pull_request == pull_request)\
587 587 .order_by(PullRequestVersion.pull_request_version_id.asc())\
588 588 .all()
589 589
590 590 def get_pr_version(self, pull_request_id, version=None):
591 591 at_version = None
592 592
593 593 if version and version == 'latest':
594 594 pull_request_ver = PullRequest.get(pull_request_id)
595 595 pull_request_obj = pull_request_ver
596 596 _org_pull_request_obj = pull_request_obj
597 597 at_version = 'latest'
598 598 elif version:
599 599 pull_request_ver = PullRequestVersion.get_or_404(version)
600 600 pull_request_obj = pull_request_ver
601 601 _org_pull_request_obj = pull_request_ver.pull_request
602 602 at_version = pull_request_ver.pull_request_version_id
603 603 else:
604 604 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
605 605 pull_request_id)
606 606
607 607 pull_request_display_obj = PullRequest.get_pr_display_object(
608 608 pull_request_obj, _org_pull_request_obj)
609 609
610 610 return _org_pull_request_obj, pull_request_obj, \
611 611 pull_request_display_obj, at_version
612 612
613 def pr_commits_versions(self, versions):
614 """
615 Maps the pull-request commits onto all known PR versions. This way we can
616 determine, for each commit, which PR versions it appears in.
617 """
618 commit_versions = collections.defaultdict(list)
619 num_versions = [x.pull_request_version_id for x in versions]
620 for ver in versions:
621 for commit_id in ver.revisions:
622 ver_idx = ChangesetComment.get_index_from_version(
623 ver.pull_request_version_id, num_versions=num_versions)
624 commit_versions[commit_id].append(ver_idx)
625 return commit_versions
626
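The new pr_commits_versions helper inverts the version-to-revisions relation so the commit list can show, next to each commit, which PR versions contain it. A standalone sketch of the same inversion, using plain dicts instead of the ORM objects and a simple 1-based position instead of ChangesetComment.get_index_from_version:

import collections

def commits_to_versions(versions):
    # versions: ordered list of {'id': version_id, 'revisions': [commit_id, ...]}
    commit_versions = collections.defaultdict(list)
    for idx, ver in enumerate(versions, start=1):  # 1-based display index
        for commit_id in ver['revisions']:
            commit_versions[commit_id].append(idx)
    return commit_versions

mapping = commits_to_versions([
    {'id': 10, 'revisions': ['aaa', 'bbb']},
    {'id': 11, 'revisions': ['aaa', 'bbb', 'ccc']},
])
assert mapping['ccc'] == [2]      # commit first appeared in the second version
assert mapping['aaa'] == [1, 2]   # commit is present in both versions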
613 627 def create(self, created_by, source_repo, source_ref, target_repo,
614 628 target_ref, revisions, reviewers, observers, title, description=None,
615 629 common_ancestor_id=None,
616 630 description_renderer=None,
617 631 reviewer_data=None, translator=None, auth_user=None):
618 632 translator = translator or get_current_request().translate
619 633
620 634 created_by_user = self._get_user(created_by)
621 635 auth_user = auth_user or created_by_user.AuthUser()
622 636 source_repo = self._get_repo(source_repo)
623 637 target_repo = self._get_repo(target_repo)
624 638
625 639 pull_request = PullRequest()
626 640 pull_request.source_repo = source_repo
627 641 pull_request.source_ref = source_ref
628 642 pull_request.target_repo = target_repo
629 643 pull_request.target_ref = target_ref
630 644 pull_request.revisions = revisions
631 645 pull_request.title = title
632 646 pull_request.description = description
633 647 pull_request.description_renderer = description_renderer
634 648 pull_request.author = created_by_user
635 649 pull_request.reviewer_data = reviewer_data
636 650 pull_request.pull_request_state = pull_request.STATE_CREATING
637 651 pull_request.common_ancestor_id = common_ancestor_id
638 652
639 653 Session().add(pull_request)
640 654 Session().flush()
641 655
642 656 reviewer_ids = set()
643 657 # members / reviewers
644 658 for reviewer_object in reviewers:
645 659 user_id, reasons, mandatory, role, rules = reviewer_object
646 660 user = self._get_user(user_id)
647 661
648 662 # skip duplicates
649 663 if user.user_id in reviewer_ids:
650 664 continue
651 665
652 666 reviewer_ids.add(user.user_id)
653 667
654 668 reviewer = PullRequestReviewers()
655 669 reviewer.user = user
656 670 reviewer.pull_request = pull_request
657 671 reviewer.reasons = reasons
658 672 reviewer.mandatory = mandatory
659 673 reviewer.role = role
660 674
661 675 # NOTE(marcink): pick only first rule for now
662 676 rule_id = list(rules)[0] if rules else None
663 677 rule = RepoReviewRule.get(rule_id) if rule_id else None
664 678 if rule:
665 679 review_group = rule.user_group_vote_rule(user_id)
666 680 # we check if this particular reviewer is member of a voting group
667 681 if review_group:
668 682 # NOTE(marcink):
669 683 # it can be that the user is a member of more groups, but we pick the first,
670 684 # the same as the default reviewers algorithm
671 685 review_group = review_group[0]
672 686
673 687 rule_data = {
674 688 'rule_name':
675 689 rule.review_rule_name,
676 690 'rule_user_group_entry_id':
677 691 review_group.repo_review_rule_users_group_id,
678 692 'rule_user_group_name':
679 693 review_group.users_group.users_group_name,
680 694 'rule_user_group_members':
681 695 [x.user.username for x in review_group.users_group.members],
682 696 'rule_user_group_members_id':
683 697 [x.user.user_id for x in review_group.users_group.members],
684 698 }
685 699 # e.g {'vote_rule': -1, 'mandatory': True}
686 700 rule_data.update(review_group.rule_data())
687 701
688 702 reviewer.rule_data = rule_data
689 703
690 704 Session().add(reviewer)
691 705 Session().flush()
692 706
693 707 for observer_object in observers:
694 708 user_id, reasons, mandatory, role, rules = observer_object
695 709 user = self._get_user(user_id)
696 710
697 711 # skip duplicates from reviewers
698 712 if user.user_id in reviewer_ids:
699 713 continue
700 714
701 715 #reviewer_ids.add(user.user_id)
702 716
703 717 observer = PullRequestReviewers()
704 718 observer.user = user
705 719 observer.pull_request = pull_request
706 720 observer.reasons = reasons
707 721 observer.mandatory = mandatory
708 722 observer.role = role
709 723
710 724 # NOTE(marcink): pick only first rule for now
711 725 rule_id = list(rules)[0] if rules else None
712 726 rule = RepoReviewRule.get(rule_id) if rule_id else None
713 727 if rule:
714 728 # TODO(marcink): do we need this for observers ??
715 729 pass
716 730
717 731 Session().add(observer)
718 732 Session().flush()
719 733
720 734 # Set approval status to "Under Review" for all commits which are
721 735 # part of this pull request.
722 736 ChangesetStatusModel().set_status(
723 737 repo=target_repo,
724 738 status=ChangesetStatus.STATUS_UNDER_REVIEW,
725 739 user=created_by_user,
726 740 pull_request=pull_request
727 741 )
728 742 # we commit early at this point because the queries above take row locks,
729 743 # so we need to commit and finish the transaction before the validate call
730 744 # below, which for large repos can take long and would otherwise keep
731 745 # those row locks held
732 746 Session().commit()
733 747
734 748 # prepare workspace, and run initial merge simulation. Set state during that
735 749 # operation
736 750 pull_request = PullRequest.get(pull_request.pull_request_id)
737 751
738 752 # set as merging, for merge simulation, and if finished to created so we mark
739 753 # simulation is working fine
740 754 with pull_request.set_state(PullRequest.STATE_MERGING,
741 755 final_state=PullRequest.STATE_CREATED) as state_obj:
742 756 MergeCheck.validate(
743 757 pull_request, auth_user=auth_user, translator=translator)
744 758
745 759 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
746 760 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
747 761
748 762 creation_data = pull_request.get_api_data(with_merge_state=False)
749 763 self._log_audit_action(
750 764 'repo.pull_request.create', {'data': creation_data},
751 765 auth_user, pull_request)
752 766
753 767 return pull_request
754 768
755 769 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
756 770 pull_request = self.__get_pull_request(pull_request)
757 771 target_scm = pull_request.target_repo.scm_instance()
758 772 if action == 'create':
759 773 trigger_hook = hooks_utils.trigger_create_pull_request_hook
760 774 elif action == 'merge':
761 775 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
762 776 elif action == 'close':
763 777 trigger_hook = hooks_utils.trigger_close_pull_request_hook
764 778 elif action == 'review_status_change':
765 779 trigger_hook = hooks_utils.trigger_review_pull_request_hook
766 780 elif action == 'update':
767 781 trigger_hook = hooks_utils.trigger_update_pull_request_hook
768 782 elif action == 'comment':
769 783 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
770 784 elif action == 'comment_edit':
771 785 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
772 786 else:
773 787 return
774 788
775 789 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
776 790 pull_request, action, trigger_hook)
777 791 trigger_hook(
778 792 username=user.username,
779 793 repo_name=pull_request.target_repo.repo_name,
780 794 repo_type=target_scm.alias,
781 795 pull_request=pull_request,
782 796 data=data)
783 797
784 798 def _get_commit_ids(self, pull_request):
785 799 """
786 800 Return the commit ids of the merged pull request.
787 801
 788 802         This method does not yet deal correctly with the lack of autoupdates
 789 803         nor with implicit target updates.
 790 804         For example: if a commit in the source repo is already in the target, it
 791 805         will still be reported.
792 806 """
793 807 merge_rev = pull_request.merge_rev
794 808 if merge_rev is None:
795 809 raise ValueError('This pull request was not merged yet')
796 810
797 811 commit_ids = list(pull_request.revisions)
798 812 if merge_rev not in commit_ids:
799 813 commit_ids.append(merge_rev)
800 814
801 815 return commit_ids
802 816
803 817 def merge_repo(self, pull_request, user, extras):
804 818 log.debug("Merging pull request %s", pull_request.pull_request_id)
805 819 extras['user_agent'] = 'internal-merge'
806 820 merge_state = self._merge_pull_request(pull_request, user, extras)
807 821 if merge_state.executed:
808 822 log.debug("Merge was successful, updating the pull request comments.")
809 823 self._comment_and_close_pr(pull_request, user, merge_state)
810 824
811 825 self._log_audit_action(
812 826 'repo.pull_request.merge',
813 827 {'merge_state': merge_state.__dict__},
814 828 user, pull_request)
815 829
816 830 else:
817 831 log.warn("Merge failed, not updating the pull request.")
818 832 return merge_state
819 833
820 834 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
821 835 target_vcs = pull_request.target_repo.scm_instance()
822 836 source_vcs = pull_request.source_repo.scm_instance()
823 837
824 838 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
825 839 pr_id=pull_request.pull_request_id,
826 840 pr_title=pull_request.title,
827 841 source_repo=source_vcs.name,
828 842 source_ref_name=pull_request.source_ref_parts.name,
829 843 target_repo=target_vcs.name,
830 844 target_ref_name=pull_request.target_ref_parts.name,
831 845 )
832 846
833 847 workspace_id = self._workspace_id(pull_request)
834 848 repo_id = pull_request.target_repo.repo_id
835 849 use_rebase = self._use_rebase_for_merging(pull_request)
836 850 close_branch = self._close_branch_before_merging(pull_request)
837 851 user_name = self._user_name_for_merging(pull_request, user)
838 852
839 853 target_ref = self._refresh_reference(
840 854 pull_request.target_ref_parts, target_vcs)
841 855
842 856 callback_daemon, extras = prepare_callback_daemon(
843 857 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
844 858 host=vcs_settings.HOOKS_HOST,
845 859 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
846 860
847 861 with callback_daemon:
848 862 # TODO: johbo: Implement a clean way to run a config_override
849 863 # for a single call.
850 864 target_vcs.config.set(
851 865 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
852 866
853 867 merge_state = target_vcs.merge(
854 868 repo_id, workspace_id, target_ref, source_vcs,
855 869 pull_request.source_ref_parts,
856 870 user_name=user_name, user_email=user.email,
857 871 message=message, use_rebase=use_rebase,
858 872 close_branch=close_branch)
859 873 return merge_state
860 874
861 875 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
862 876 pull_request.merge_rev = merge_state.merge_ref.commit_id
863 877 pull_request.updated_on = datetime.datetime.now()
864 878 close_msg = close_msg or 'Pull request merged and closed'
865 879
866 880 CommentsModel().create(
867 881 text=safe_unicode(close_msg),
868 882 repo=pull_request.target_repo.repo_id,
869 883 user=user.user_id,
870 884 pull_request=pull_request.pull_request_id,
871 885 f_path=None,
872 886 line_no=None,
873 887 closing_pr=True
874 888 )
875 889
876 890 Session().add(pull_request)
877 891 Session().flush()
878 892 # TODO: paris: replace invalidation with less radical solution
879 893 ScmModel().mark_for_invalidation(
880 894 pull_request.target_repo.repo_name)
881 895 self.trigger_pull_request_hook(pull_request, user, 'merge')
882 896
883 897 def has_valid_update_type(self, pull_request):
884 898 source_ref_type = pull_request.source_ref_parts.type
885 899 return source_ref_type in self.REF_TYPES
886 900
887 901 def get_flow_commits(self, pull_request):
888 902
889 903 # source repo
890 904 source_ref_name = pull_request.source_ref_parts.name
891 905 source_ref_type = pull_request.source_ref_parts.type
892 906 source_ref_id = pull_request.source_ref_parts.commit_id
893 907 source_repo = pull_request.source_repo.scm_instance()
894 908
895 909 try:
896 910 if source_ref_type in self.REF_TYPES:
897 911 source_commit = source_repo.get_commit(source_ref_name)
898 912 else:
899 913 source_commit = source_repo.get_commit(source_ref_id)
900 914 except CommitDoesNotExistError:
901 915 raise SourceRefMissing()
902 916
903 917 # target repo
904 918 target_ref_name = pull_request.target_ref_parts.name
905 919 target_ref_type = pull_request.target_ref_parts.type
906 920 target_ref_id = pull_request.target_ref_parts.commit_id
907 921 target_repo = pull_request.target_repo.scm_instance()
908 922
909 923 try:
910 924 if target_ref_type in self.REF_TYPES:
911 925 target_commit = target_repo.get_commit(target_ref_name)
912 926 else:
913 927 target_commit = target_repo.get_commit(target_ref_id)
914 928 except CommitDoesNotExistError:
915 929 raise TargetRefMissing()
916 930
917 931 return source_commit, target_commit
918 932
919 933 def update_commits(self, pull_request, updating_user):
920 934 """
921 935 Get the updated list of commits for the pull request
922 936 and return the new pull request version and the list
923 937 of commits processed by this update action
924 938
 925 939         updating_user is the user object that triggered the update
926 940 """
927 941 pull_request = self.__get_pull_request(pull_request)
928 942 source_ref_type = pull_request.source_ref_parts.type
929 943 source_ref_name = pull_request.source_ref_parts.name
930 944 source_ref_id = pull_request.source_ref_parts.commit_id
931 945
932 946 target_ref_type = pull_request.target_ref_parts.type
933 947 target_ref_name = pull_request.target_ref_parts.name
934 948 target_ref_id = pull_request.target_ref_parts.commit_id
935 949
936 950 if not self.has_valid_update_type(pull_request):
937 951 log.debug("Skipping update of pull request %s due to ref type: %s",
938 952 pull_request, source_ref_type)
939 953 return UpdateResponse(
940 954 executed=False,
941 955 reason=UpdateFailureReason.WRONG_REF_TYPE,
942 956 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
943 957 source_changed=False, target_changed=False)
944 958
945 959 try:
946 960 source_commit, target_commit = self.get_flow_commits(pull_request)
947 961 except SourceRefMissing:
948 962 return UpdateResponse(
949 963 executed=False,
950 964 reason=UpdateFailureReason.MISSING_SOURCE_REF,
951 965 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
952 966 source_changed=False, target_changed=False)
953 967 except TargetRefMissing:
954 968 return UpdateResponse(
955 969 executed=False,
956 970 reason=UpdateFailureReason.MISSING_TARGET_REF,
957 971 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
958 972 source_changed=False, target_changed=False)
959 973
960 974 source_changed = source_ref_id != source_commit.raw_id
961 975 target_changed = target_ref_id != target_commit.raw_id
962 976
963 977 if not (source_changed or target_changed):
964 978 log.debug("Nothing changed in pull request %s", pull_request)
965 979 return UpdateResponse(
966 980 executed=False,
967 981 reason=UpdateFailureReason.NO_CHANGE,
968 982 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
 969 983                 source_changed=source_changed, target_changed=target_changed)
970 984
971 985 change_in_found = 'target repo' if target_changed else 'source repo'
972 986 log.debug('Updating pull request because of change in %s detected',
973 987 change_in_found)
974 988
 975 989         # Finally, an update is needed: in case of a source change we create a new
 976 990         # version, otherwise we just update in place
977 991 if source_changed:
978 992 pull_request_version = self._create_version_from_snapshot(pull_request)
979 993 self._link_comments_to_version(pull_request_version)
980 994 else:
981 995 try:
982 996 ver = pull_request.versions[-1]
983 997 except IndexError:
984 998 ver = None
985 999
986 1000 pull_request.pull_request_version_id = \
987 1001 ver.pull_request_version_id if ver else None
988 1002 pull_request_version = pull_request
989 1003
990 1004 source_repo = pull_request.source_repo.scm_instance()
991 1005 target_repo = pull_request.target_repo.scm_instance()
992 1006
993 1007 # re-compute commit ids
994 1008 old_commit_ids = pull_request.revisions
995 1009 pre_load = ["author", "date", "message", "branch"]
996 1010 commit_ranges = target_repo.compare(
997 1011 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
998 1012 pre_load=pre_load)
999 1013
1000 1014 target_ref = target_commit.raw_id
1001 1015 source_ref = source_commit.raw_id
1002 1016 ancestor_commit_id = target_repo.get_common_ancestor(
1003 1017 target_ref, source_ref, source_repo)
1004 1018
1005 1019 if not ancestor_commit_id:
1006 1020 raise ValueError(
1007 1021 'cannot calculate diff info without a common ancestor. '
1008 1022 'Make sure both repositories are related, and have a common forking commit.')
1009 1023
1010 1024 pull_request.common_ancestor_id = ancestor_commit_id
1011 1025
1012 1026 pull_request.source_ref = '%s:%s:%s' % (
1013 1027 source_ref_type, source_ref_name, source_commit.raw_id)
1014 1028 pull_request.target_ref = '%s:%s:%s' % (
1015 1029 target_ref_type, target_ref_name, ancestor_commit_id)
1016 1030
1017 1031 pull_request.revisions = [
1018 1032 commit.raw_id for commit in reversed(commit_ranges)]
1019 1033 pull_request.updated_on = datetime.datetime.now()
1020 1034 Session().add(pull_request)
1021 1035 new_commit_ids = pull_request.revisions
1022 1036
1023 1037 old_diff_data, new_diff_data = self._generate_update_diffs(
1024 1038 pull_request, pull_request_version)
1025 1039
1026 1040 # calculate commit and file changes
1027 1041 commit_changes = self._calculate_commit_id_changes(
1028 1042 old_commit_ids, new_commit_ids)
1029 1043 file_changes = self._calculate_file_changes(
1030 1044 old_diff_data, new_diff_data)
1031 1045
1032 1046 # set comments as outdated if DIFFS changed
1033 1047 CommentsModel().outdate_comments(
1034 1048 pull_request, old_diff_data=old_diff_data,
1035 1049 new_diff_data=new_diff_data)
1036 1050
1037 1051 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1038 1052 file_node_changes = (
1039 1053 file_changes.added or file_changes.modified or file_changes.removed)
1040 1054 pr_has_changes = valid_commit_changes or file_node_changes
1041 1055
1042 1056 # Add an automatic comment to the pull request, in case
1043 1057 # anything has changed
1044 1058 if pr_has_changes:
1045 1059 update_comment = CommentsModel().create(
1046 1060 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1047 1061 repo=pull_request.target_repo,
1048 1062 user=pull_request.author,
1049 1063 pull_request=pull_request,
1050 1064 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1051 1065
1052 1066 # Update status to "Under Review" for added commits
1053 1067 for commit_id in commit_changes.added:
1054 1068 ChangesetStatusModel().set_status(
1055 1069 repo=pull_request.source_repo,
1056 1070 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1057 1071 comment=update_comment,
1058 1072 user=pull_request.author,
1059 1073 pull_request=pull_request,
1060 1074 revision=commit_id)
1061 1075
1062 1076 # send update email to users
1063 1077 try:
1064 1078 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1065 1079 ancestor_commit_id=ancestor_commit_id,
1066 1080 commit_changes=commit_changes,
1067 1081 file_changes=file_changes)
1068 1082 except Exception:
1069 1083 log.exception('Failed to send email notification to users')
1070 1084
1071 1085 log.debug(
1072 1086 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1073 1087 'removed_ids: %s', pull_request.pull_request_id,
1074 1088 commit_changes.added, commit_changes.common, commit_changes.removed)
1075 1089 log.debug(
1076 1090 'Updated pull request with the following file changes: %s',
1077 1091 file_changes)
1078 1092
1079 1093 log.info(
1080 1094 "Updated pull request %s from commit %s to commit %s, "
1081 1095 "stored new version %s of this pull request.",
1082 1096 pull_request.pull_request_id, source_ref_id,
1083 1097 pull_request.source_ref_parts.commit_id,
1084 1098 pull_request_version.pull_request_version_id)
1085 1099 Session().commit()
1086 1100 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1087 1101
1088 1102 return UpdateResponse(
1089 1103 executed=True, reason=UpdateFailureReason.NONE,
1090 1104 old=pull_request, new=pull_request_version,
1091 1105 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1092 1106 source_changed=source_changed, target_changed=target_changed)
1093 1107
1094 1108 def _create_version_from_snapshot(self, pull_request):
1095 1109 version = PullRequestVersion()
1096 1110 version.title = pull_request.title
1097 1111 version.description = pull_request.description
1098 1112 version.status = pull_request.status
1099 1113 version.pull_request_state = pull_request.pull_request_state
1100 1114 version.created_on = datetime.datetime.now()
1101 1115 version.updated_on = pull_request.updated_on
1102 1116 version.user_id = pull_request.user_id
1103 1117 version.source_repo = pull_request.source_repo
1104 1118 version.source_ref = pull_request.source_ref
1105 1119 version.target_repo = pull_request.target_repo
1106 1120 version.target_ref = pull_request.target_ref
1107 1121
1108 1122 version._last_merge_source_rev = pull_request._last_merge_source_rev
1109 1123 version._last_merge_target_rev = pull_request._last_merge_target_rev
1110 1124 version.last_merge_status = pull_request.last_merge_status
1111 1125 version.last_merge_metadata = pull_request.last_merge_metadata
1112 1126 version.shadow_merge_ref = pull_request.shadow_merge_ref
1113 1127 version.merge_rev = pull_request.merge_rev
1114 1128 version.reviewer_data = pull_request.reviewer_data
1115 1129
1116 1130 version.revisions = pull_request.revisions
1117 1131 version.common_ancestor_id = pull_request.common_ancestor_id
1118 1132 version.pull_request = pull_request
1119 1133 Session().add(version)
1120 1134 Session().flush()
1121 1135
1122 1136 return version
1123 1137
1124 1138 def _generate_update_diffs(self, pull_request, pull_request_version):
1125 1139
1126 1140 diff_context = (
1127 1141 self.DIFF_CONTEXT +
1128 1142 CommentsModel.needed_extra_diff_context())
1129 1143 hide_whitespace_changes = False
1130 1144 source_repo = pull_request_version.source_repo
1131 1145 source_ref_id = pull_request_version.source_ref_parts.commit_id
1132 1146 target_ref_id = pull_request_version.target_ref_parts.commit_id
1133 1147 old_diff = self._get_diff_from_pr_or_version(
1134 1148 source_repo, source_ref_id, target_ref_id,
1135 1149 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1136 1150
1137 1151 source_repo = pull_request.source_repo
1138 1152 source_ref_id = pull_request.source_ref_parts.commit_id
1139 1153 target_ref_id = pull_request.target_ref_parts.commit_id
1140 1154
1141 1155 new_diff = self._get_diff_from_pr_or_version(
1142 1156 source_repo, source_ref_id, target_ref_id,
1143 1157 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1144 1158
1145 1159 old_diff_data = diffs.DiffProcessor(old_diff)
1146 1160 old_diff_data.prepare()
1147 1161 new_diff_data = diffs.DiffProcessor(new_diff)
1148 1162 new_diff_data.prepare()
1149 1163
1150 1164 return old_diff_data, new_diff_data
1151 1165
1152 1166 def _link_comments_to_version(self, pull_request_version):
1153 1167 """
1154 1168 Link all unlinked comments of this pull request to the given version.
1155 1169
1156 1170 :param pull_request_version: The `PullRequestVersion` to which
1157 1171 the comments shall be linked.
1158 1172
1159 1173 """
1160 1174 pull_request = pull_request_version.pull_request
1161 1175 comments = ChangesetComment.query()\
1162 1176 .filter(
1163 1177 # TODO: johbo: Should we query for the repo at all here?
1164 1178 # Pending decision on how comments of PRs are to be related
1165 1179 # to either the source repo, the target repo or no repo at all.
1166 1180 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1167 1181 ChangesetComment.pull_request == pull_request,
1168 1182 ChangesetComment.pull_request_version == None)\
1169 1183 .order_by(ChangesetComment.comment_id.asc())
1170 1184
1171 1185 # TODO: johbo: Find out why this breaks if it is done in a bulk
1172 1186 # operation.
1173 1187 for comment in comments:
1174 1188 comment.pull_request_version_id = (
1175 1189 pull_request_version.pull_request_version_id)
1176 1190 Session().add(comment)
1177 1191
1178 1192 def _calculate_commit_id_changes(self, old_ids, new_ids):
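        # Illustrative example (hypothetical ids): old_ids=['a', 'b'], new_ids=['b', 'c']
        # yields added=['c'], common=['b'], removed=['a'], total=['b', 'c'].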
1179 1193 added = [x for x in new_ids if x not in old_ids]
1180 1194 common = [x for x in new_ids if x in old_ids]
1181 1195 removed = [x for x in old_ids if x not in new_ids]
1182 1196 total = new_ids
1183 1197 return ChangeTuple(added, common, removed, total)
1184 1198
1185 1199 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1186 1200
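        # The comparison is based on per-file md5 hashes of the raw diffs, e.g.
        # (hypothetical): old diff has {'a.py': <hash1>}, new diff has 'a.py' with a
        # different hash plus 'b.py' -> modified=['a.py'], added=['b.py'], removed=[].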
1187 1201 old_files = OrderedDict()
1188 1202 for diff_data in old_diff_data.parsed_diff:
1189 1203 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1190 1204
1191 1205 added_files = []
1192 1206 modified_files = []
1193 1207 removed_files = []
1194 1208 for diff_data in new_diff_data.parsed_diff:
1195 1209 new_filename = diff_data['filename']
1196 1210 new_hash = md5_safe(diff_data['raw_diff'])
1197 1211
1198 1212 old_hash = old_files.get(new_filename)
1199 1213 if not old_hash:
 1200 1214                 # file is not present in the old diff; we have to figure out the
 1201 1215                 # operation (ADD/REMOVE) from the parsed diff
1202 1216 operations_dict = diff_data['stats']['ops']
1203 1217 if diffs.DEL_FILENODE in operations_dict:
1204 1218 removed_files.append(new_filename)
1205 1219 else:
1206 1220 added_files.append(new_filename)
1207 1221 else:
1208 1222 if new_hash != old_hash:
1209 1223 modified_files.append(new_filename)
1210 1224 # now remove a file from old, since we have seen it already
1211 1225 del old_files[new_filename]
1212 1226
 1213 1227         # removed files are those present in the old diff but not in the NEW one;
 1214 1228         # since we delete old entries that also appear in the new diff, any
 1215 1229         # left-overs are the removed files
1216 1230 removed_files.extend(old_files.keys())
1217 1231
1218 1232 return FileChangeTuple(added_files, modified_files, removed_files)
1219 1233
1220 1234 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1221 1235 """
1222 1236 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1223 1237 so it's always looking the same disregarding on which default
1224 1238 renderer system is using.
1225 1239
1226 1240 :param ancestor_commit_id: ancestor raw_id
1227 1241 :param changes: changes named tuple
1228 1242 :param file_changes: file changes named tuple
1229 1243
1230 1244 """
1231 1245 new_status = ChangesetStatus.get_status_lbl(
1232 1246 ChangesetStatus.STATUS_UNDER_REVIEW)
1233 1247
1234 1248 changed_files = (
1235 1249 file_changes.added + file_changes.modified + file_changes.removed)
1236 1250
1237 1251 params = {
1238 1252 'under_review_label': new_status,
1239 1253 'added_commits': changes.added,
1240 1254 'removed_commits': changes.removed,
1241 1255 'changed_files': changed_files,
1242 1256 'added_files': file_changes.added,
1243 1257 'modified_files': file_changes.modified,
1244 1258 'removed_files': file_changes.removed,
1245 1259 'ancestor_commit_id': ancestor_commit_id
1246 1260 }
1247 1261 renderer = RstTemplateRenderer()
1248 1262 return renderer.render('pull_request_update.mako', **params)
1249 1263
1250 1264 def edit(self, pull_request, title, description, description_renderer, user):
1251 1265 pull_request = self.__get_pull_request(pull_request)
1252 1266 old_data = pull_request.get_api_data(with_merge_state=False)
1253 1267 if pull_request.is_closed():
1254 1268 raise ValueError('This pull request is closed')
1255 1269 if title:
1256 1270 pull_request.title = title
1257 1271 pull_request.description = description
1258 1272 pull_request.updated_on = datetime.datetime.now()
1259 1273 pull_request.description_renderer = description_renderer
1260 1274 Session().add(pull_request)
1261 1275 self._log_audit_action(
1262 1276 'repo.pull_request.edit', {'old_data': old_data},
1263 1277 user, pull_request)
1264 1278
1265 1279 def update_reviewers(self, pull_request, reviewer_data, user):
1266 1280 """
1267 1281 Update the reviewers in the pull request
1268 1282
1269 1283 :param pull_request: the pr to update
1270 1284 :param reviewer_data: list of tuples
1271 1285 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
 1272 1286         :param user: current user who triggers this action
1273 1287 """
1274 1288
1275 1289 pull_request = self.__get_pull_request(pull_request)
1276 1290 if pull_request.is_closed():
1277 1291 raise ValueError('This pull request is closed')
1278 1292
1279 1293 reviewers = {}
1280 1294 for user_id, reasons, mandatory, role, rules in reviewer_data:
1281 1295 if isinstance(user_id, (int, compat.string_types)):
1282 1296 user_id = self._get_user(user_id).user_id
1283 1297 reviewers[user_id] = {
1284 1298 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1285 1299
1286 1300 reviewers_ids = set(reviewers.keys())
1287 1301 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1288 1302 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1289 1303
1290 1304 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1291 1305
1292 1306 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1293 1307 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1294 1308
1295 1309 log.debug("Adding %s reviewers", ids_to_add)
1296 1310 log.debug("Removing %s reviewers", ids_to_remove)
1297 1311 changed = False
1298 1312 added_audit_reviewers = []
1299 1313 removed_audit_reviewers = []
1300 1314
1301 1315 for uid in ids_to_add:
1302 1316 changed = True
1303 1317 _usr = self._get_user(uid)
1304 1318 reviewer = PullRequestReviewers()
1305 1319 reviewer.user = _usr
1306 1320 reviewer.pull_request = pull_request
1307 1321 reviewer.reasons = reviewers[uid]['reasons']
1308 1322 # NOTE(marcink): mandatory shouldn't be changed now
1309 1323 # reviewer.mandatory = reviewers[uid]['reasons']
1310 1324 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1311 1325 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1312 1326 Session().add(reviewer)
1313 1327 added_audit_reviewers.append(reviewer.get_dict())
1314 1328
1315 1329 for uid in ids_to_remove:
1316 1330 changed = True
 1317 1331             # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
 1318 1332             # This handles the edge case of the same reviewer being present twice,
 1319 1333             # which CAN happen due to the lack of DB constraints
1320 1334 reviewers = PullRequestReviewers.query()\
1321 1335 .filter(PullRequestReviewers.user_id == uid,
1322 1336 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1323 1337 PullRequestReviewers.pull_request == pull_request)\
1324 1338 .all()
1325 1339
1326 1340 for obj in reviewers:
 1327 1341                 removed_audit_reviewers.append(obj.get_dict())
1328 1342 Session().delete(obj)
1329 1343
1330 1344 if changed:
1331 1345 Session().expire_all()
1332 1346 pull_request.updated_on = datetime.datetime.now()
1333 1347 Session().add(pull_request)
1334 1348
1335 1349 # finally store audit logs
1336 1350 for user_data in added_audit_reviewers:
1337 1351 self._log_audit_action(
1338 1352 'repo.pull_request.reviewer.add', {'data': user_data},
1339 1353 user, pull_request)
1340 1354 for user_data in removed_audit_reviewers:
1341 1355 self._log_audit_action(
1342 1356 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1343 1357 user, pull_request)
1344 1358
1345 1359 self.notify_reviewers(pull_request, ids_to_add, user)
1346 1360 return ids_to_add, ids_to_remove
1347 1361
1348 1362 def update_observers(self, pull_request, observer_data, user):
1349 1363 """
1350 1364 Update the observers in the pull request
1351 1365
1352 1366 :param pull_request: the pr to update
1353 1367 :param observer_data: list of tuples
1354 1368 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
 1355 1369         :param user: current user who triggers this action
1356 1370 """
1357 1371 pull_request = self.__get_pull_request(pull_request)
1358 1372 if pull_request.is_closed():
1359 1373 raise ValueError('This pull request is closed')
1360 1374
1361 1375 observers = {}
1362 1376 for user_id, reasons, mandatory, role, rules in observer_data:
1363 1377 if isinstance(user_id, (int, compat.string_types)):
1364 1378 user_id = self._get_user(user_id).user_id
1365 1379 observers[user_id] = {
 1366 1380                 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1367 1381
1368 1382 observers_ids = set(observers.keys())
1369 1383 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1370 1384 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1371 1385
1372 1386 current_observers_ids = set([x.user.user_id for x in current_observers])
1373 1387
1374 1388 ids_to_add = observers_ids.difference(current_observers_ids)
1375 1389 ids_to_remove = current_observers_ids.difference(observers_ids)
1376 1390
 1377 1391         log.debug("Adding %s observers", ids_to_add)
 1378 1392         log.debug("Removing %s observers", ids_to_remove)
1379 1393 changed = False
1380 1394 added_audit_observers = []
1381 1395 removed_audit_observers = []
1382 1396
1383 1397 for uid in ids_to_add:
1384 1398 changed = True
1385 1399 _usr = self._get_user(uid)
1386 1400 observer = PullRequestReviewers()
1387 1401 observer.user = _usr
1388 1402 observer.pull_request = pull_request
1389 1403 observer.reasons = observers[uid]['reasons']
1390 1404 # NOTE(marcink): mandatory shouldn't be changed now
1391 1405 # observer.mandatory = observer[uid]['reasons']
1392 1406
1393 1407 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1394 1408 observer.role = PullRequestReviewers.ROLE_OBSERVER
1395 1409 Session().add(observer)
1396 1410 added_audit_observers.append(observer.get_dict())
1397 1411
1398 1412 for uid in ids_to_remove:
1399 1413 changed = True
 1400 1414             # NOTE(marcink): we fetch "ALL" observer objects using .all().
 1401 1415             # This handles the edge case of the same observer being present twice,
 1402 1416             # which CAN happen due to the lack of DB constraints
1403 1417 observers = PullRequestReviewers.query()\
1404 1418 .filter(PullRequestReviewers.user_id == uid,
1405 1419 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1406 1420 PullRequestReviewers.pull_request == pull_request)\
1407 1421 .all()
1408 1422
1409 1423 for obj in observers:
 1410 1424                 removed_audit_observers.append(obj.get_dict())
1411 1425 Session().delete(obj)
1412 1426
1413 1427 if changed:
1414 1428 Session().expire_all()
1415 1429 pull_request.updated_on = datetime.datetime.now()
1416 1430 Session().add(pull_request)
1417 1431
1418 1432 # finally store audit logs
1419 1433 for user_data in added_audit_observers:
1420 1434 self._log_audit_action(
1421 1435 'repo.pull_request.observer.add', {'data': user_data},
1422 1436 user, pull_request)
1423 1437 for user_data in removed_audit_observers:
1424 1438 self._log_audit_action(
1425 1439 'repo.pull_request.observer.delete', {'old_data': user_data},
1426 1440 user, pull_request)
1427 1441
1428 1442 self.notify_observers(pull_request, ids_to_add, user)
1429 1443 return ids_to_add, ids_to_remove
1430 1444
1431 1445 def get_url(self, pull_request, request=None, permalink=False):
1432 1446 if not request:
1433 1447 request = get_current_request()
1434 1448
1435 1449 if permalink:
1436 1450 return request.route_url(
1437 1451 'pull_requests_global',
1438 1452 pull_request_id=pull_request.pull_request_id,)
1439 1453 else:
1440 1454 return request.route_url('pullrequest_show',
1441 1455 repo_name=safe_str(pull_request.target_repo.repo_name),
1442 1456 pull_request_id=pull_request.pull_request_id,)
1443 1457
1444 1458 def get_shadow_clone_url(self, pull_request, request=None):
1445 1459 """
 1446 1460         Returns a qualified url pointing to the shadow repository. If this pull
1447 1461 request is closed there is no shadow repository and ``None`` will be
1448 1462 returned.
1449 1463 """
1450 1464 if pull_request.is_closed():
1451 1465 return None
1452 1466 else:
1453 1467 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1454 1468 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1455 1469
1456 1470 def _notify_reviewers(self, pull_request, user_ids, role, user):
1457 1471 # notification to reviewers/observers
1458 1472 if not user_ids:
1459 1473 return
1460 1474
1461 1475 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1462 1476
1463 1477 pull_request_obj = pull_request
1464 1478 # get the current participants of this pull request
1465 1479 recipients = user_ids
1466 1480 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1467 1481
1468 1482 pr_source_repo = pull_request_obj.source_repo
1469 1483 pr_target_repo = pull_request_obj.target_repo
1470 1484
1471 1485 pr_url = h.route_url('pullrequest_show',
1472 1486 repo_name=pr_target_repo.repo_name,
1473 1487 pull_request_id=pull_request_obj.pull_request_id,)
1474 1488
1475 1489 # set some variables for email notification
1476 1490 pr_target_repo_url = h.route_url(
1477 1491 'repo_summary', repo_name=pr_target_repo.repo_name)
1478 1492
1479 1493 pr_source_repo_url = h.route_url(
1480 1494 'repo_summary', repo_name=pr_source_repo.repo_name)
1481 1495
1482 1496 # pull request specifics
1483 1497 pull_request_commits = [
1484 1498 (x.raw_id, x.message)
1485 1499 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1486 1500
1487 1501 current_rhodecode_user = user
1488 1502 kwargs = {
1489 1503 'user': current_rhodecode_user,
1490 1504 'pull_request_author': pull_request.author,
1491 1505 'pull_request': pull_request_obj,
1492 1506 'pull_request_commits': pull_request_commits,
1493 1507
1494 1508 'pull_request_target_repo': pr_target_repo,
1495 1509 'pull_request_target_repo_url': pr_target_repo_url,
1496 1510
1497 1511 'pull_request_source_repo': pr_source_repo,
1498 1512 'pull_request_source_repo_url': pr_source_repo_url,
1499 1513
1500 1514 'pull_request_url': pr_url,
1501 1515 'thread_ids': [pr_url],
1502 1516 'user_role': role
1503 1517 }
1504 1518
1505 1519 # create notification objects, and emails
1506 1520 NotificationModel().create(
1507 1521 created_by=current_rhodecode_user,
1508 1522 notification_subject='', # Filled in based on the notification_type
1509 1523 notification_body='', # Filled in based on the notification_type
1510 1524 notification_type=notification_type,
1511 1525 recipients=recipients,
1512 1526 email_kwargs=kwargs,
1513 1527 )
1514 1528
1515 1529 def notify_reviewers(self, pull_request, reviewers_ids, user):
1516 1530 return self._notify_reviewers(pull_request, reviewers_ids,
1517 1531 PullRequestReviewers.ROLE_REVIEWER, user)
1518 1532
1519 1533 def notify_observers(self, pull_request, observers_ids, user):
1520 1534 return self._notify_reviewers(pull_request, observers_ids,
1521 1535 PullRequestReviewers.ROLE_OBSERVER, user)
1522 1536
1523 1537 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1524 1538 commit_changes, file_changes):
1525 1539
1526 1540 updating_user_id = updating_user.user_id
1527 1541 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1528 1542 # NOTE(marcink): send notification to all other users except to
1529 1543 # person who updated the PR
1530 1544 recipients = reviewers.difference(set([updating_user_id]))
1531 1545
1532 1546 log.debug('Notify following recipients about pull-request update %s', recipients)
1533 1547
1534 1548 pull_request_obj = pull_request
1535 1549
1536 1550 # send email about the update
1537 1551 changed_files = (
1538 1552 file_changes.added + file_changes.modified + file_changes.removed)
1539 1553
1540 1554 pr_source_repo = pull_request_obj.source_repo
1541 1555 pr_target_repo = pull_request_obj.target_repo
1542 1556
1543 1557 pr_url = h.route_url('pullrequest_show',
1544 1558 repo_name=pr_target_repo.repo_name,
1545 1559 pull_request_id=pull_request_obj.pull_request_id,)
1546 1560
1547 1561 # set some variables for email notification
1548 1562 pr_target_repo_url = h.route_url(
1549 1563 'repo_summary', repo_name=pr_target_repo.repo_name)
1550 1564
1551 1565 pr_source_repo_url = h.route_url(
1552 1566 'repo_summary', repo_name=pr_source_repo.repo_name)
1553 1567
1554 1568 email_kwargs = {
1555 1569 'date': datetime.datetime.now(),
1556 1570 'updating_user': updating_user,
1557 1571
1558 1572 'pull_request': pull_request_obj,
1559 1573
1560 1574 'pull_request_target_repo': pr_target_repo,
1561 1575 'pull_request_target_repo_url': pr_target_repo_url,
1562 1576
1563 1577 'pull_request_source_repo': pr_source_repo,
1564 1578 'pull_request_source_repo_url': pr_source_repo_url,
1565 1579
1566 1580 'pull_request_url': pr_url,
1567 1581
1568 1582 'ancestor_commit_id': ancestor_commit_id,
1569 1583 'added_commits': commit_changes.added,
1570 1584 'removed_commits': commit_changes.removed,
1571 1585 'changed_files': changed_files,
1572 1586 'added_files': file_changes.added,
1573 1587 'modified_files': file_changes.modified,
1574 1588 'removed_files': file_changes.removed,
1575 1589 'thread_ids': [pr_url],
1576 1590 }
1577 1591
1578 1592 # create notification objects, and emails
1579 1593 NotificationModel().create(
1580 1594 created_by=updating_user,
1581 1595 notification_subject='', # Filled in based on the notification_type
1582 1596 notification_body='', # Filled in based on the notification_type
1583 1597 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1584 1598 recipients=recipients,
1585 1599 email_kwargs=email_kwargs,
1586 1600 )
1587 1601
1588 1602 def delete(self, pull_request, user=None):
1589 1603 if not user:
1590 1604 user = getattr(get_current_rhodecode_user(), 'username', None)
1591 1605
1592 1606 pull_request = self.__get_pull_request(pull_request)
1593 1607 old_data = pull_request.get_api_data(with_merge_state=False)
1594 1608 self._cleanup_merge_workspace(pull_request)
1595 1609 self._log_audit_action(
1596 1610 'repo.pull_request.delete', {'old_data': old_data},
1597 1611 user, pull_request)
1598 1612 Session().delete(pull_request)
1599 1613
1600 1614 def close_pull_request(self, pull_request, user):
1601 1615 pull_request = self.__get_pull_request(pull_request)
1602 1616 self._cleanup_merge_workspace(pull_request)
1603 1617 pull_request.status = PullRequest.STATUS_CLOSED
1604 1618 pull_request.updated_on = datetime.datetime.now()
1605 1619 Session().add(pull_request)
1606 1620 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1607 1621
1608 1622 pr_data = pull_request.get_api_data(with_merge_state=False)
1609 1623 self._log_audit_action(
1610 1624 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1611 1625
1612 1626 def close_pull_request_with_comment(
1613 1627 self, pull_request, user, repo, message=None, auth_user=None):
1614 1628
1615 1629 pull_request_review_status = pull_request.calculated_review_status()
1616 1630
1617 1631 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1618 1632 # approved only if we have voting consent
1619 1633 status = ChangesetStatus.STATUS_APPROVED
1620 1634 else:
1621 1635 status = ChangesetStatus.STATUS_REJECTED
1622 1636 status_lbl = ChangesetStatus.get_status_lbl(status)
1623 1637
1624 1638 default_message = (
1625 1639 'Closing with status change {transition_icon} {status}.'
1626 1640 ).format(transition_icon='>', status=status_lbl)
1627 1641 text = message or default_message
1628 1642
1629 1643 # create a comment, and link it to new status
1630 1644 comment = CommentsModel().create(
1631 1645 text=text,
1632 1646 repo=repo.repo_id,
1633 1647 user=user.user_id,
1634 1648 pull_request=pull_request.pull_request_id,
1635 1649 status_change=status_lbl,
1636 1650 status_change_type=status,
1637 1651 closing_pr=True,
1638 1652 auth_user=auth_user,
1639 1653 )
1640 1654
1641 1655 # calculate old status before we change it
1642 1656 old_calculated_status = pull_request.calculated_review_status()
1643 1657 ChangesetStatusModel().set_status(
1644 1658 repo.repo_id,
1645 1659 status,
1646 1660 user.user_id,
1647 1661 comment=comment,
1648 1662 pull_request=pull_request.pull_request_id
1649 1663 )
1650 1664
1651 1665 Session().flush()
1652 1666
1653 1667 self.trigger_pull_request_hook(pull_request, user, 'comment',
1654 1668 data={'comment': comment})
1655 1669
1656 1670 # we now calculate the status of pull request again, and based on that
 1657 1671         # calculation trigger a status change. This might happen in cases where a
 1658 1672         # non-reviewer admin closes a pr; their vote doesn't change the status,
 1659 1673         # while if they are a reviewer it might.
1660 1674 calculated_status = pull_request.calculated_review_status()
1661 1675 if old_calculated_status != calculated_status:
1662 1676 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1663 1677 data={'status': calculated_status})
1664 1678
1665 1679 # finally close the PR
1666 1680 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1667 1681
1668 1682 return comment, status
1669 1683
1670 1684 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1671 1685 _ = translator or get_current_request().translate
1672 1686
1673 1687 if not self._is_merge_enabled(pull_request):
1674 1688 return None, False, _('Server-side pull request merging is disabled.')
1675 1689
1676 1690 if pull_request.is_closed():
1677 1691 return None, False, _('This pull request is closed.')
1678 1692
1679 1693 merge_possible, msg = self._check_repo_requirements(
1680 1694 target=pull_request.target_repo, source=pull_request.source_repo,
1681 1695 translator=_)
1682 1696 if not merge_possible:
1683 1697 return None, merge_possible, msg
1684 1698
1685 1699 try:
1686 1700 merge_response = self._try_merge(
1687 1701 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1688 1702 log.debug("Merge response: %s", merge_response)
1689 1703 return merge_response, merge_response.possible, merge_response.merge_status_message
1690 1704 except NotImplementedError:
1691 1705 return None, False, _('Pull request merging is not supported.')
1692 1706
1693 1707 def _check_repo_requirements(self, target, source, translator):
1694 1708 """
1695 1709 Check if `target` and `source` have compatible requirements.
1696 1710
1697 1711 Currently this is just checking for largefiles.
1698 1712 """
1699 1713 _ = translator
1700 1714 target_has_largefiles = self._has_largefiles(target)
1701 1715 source_has_largefiles = self._has_largefiles(source)
1702 1716 merge_possible = True
1703 1717 message = u''
1704 1718
1705 1719 if target_has_largefiles != source_has_largefiles:
1706 1720 merge_possible = False
1707 1721 if source_has_largefiles:
1708 1722 message = _(
1709 1723 'Target repository large files support is disabled.')
1710 1724 else:
1711 1725 message = _(
1712 1726 'Source repository large files support is disabled.')
1713 1727
1714 1728 return merge_possible, message
1715 1729
1716 1730 def _has_largefiles(self, repo):
1717 1731 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1718 1732 'extensions', 'largefiles')
1719 1733 return largefiles_ui and largefiles_ui[0].active
1720 1734
1721 1735 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1722 1736 """
1723 1737 Try to merge the pull request and return the merge status.
1724 1738 """
1725 1739 log.debug(
1726 1740 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1727 1741 pull_request.pull_request_id, force_shadow_repo_refresh)
1728 1742 target_vcs = pull_request.target_repo.scm_instance()
1729 1743 # Refresh the target reference.
1730 1744 try:
1731 1745 target_ref = self._refresh_reference(
1732 1746 pull_request.target_ref_parts, target_vcs)
1733 1747 except CommitDoesNotExistError:
1734 1748 merge_state = MergeResponse(
1735 1749 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1736 1750 metadata={'target_ref': pull_request.target_ref_parts})
1737 1751 return merge_state
1738 1752
1739 1753 target_locked = pull_request.target_repo.locked
1740 1754 if target_locked and target_locked[0]:
1741 1755 locked_by = 'user:{}'.format(target_locked[0])
1742 1756 log.debug("The target repository is locked by %s.", locked_by)
1743 1757 merge_state = MergeResponse(
1744 1758 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1745 1759 metadata={'locked_by': locked_by})
1746 1760 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1747 1761 pull_request, target_ref):
1748 1762 log.debug("Refreshing the merge status of the repository.")
1749 1763 merge_state = self._refresh_merge_state(
1750 1764 pull_request, target_vcs, target_ref)
1751 1765 else:
1752 1766 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1753 1767 metadata = {
1754 1768 'unresolved_files': '',
1755 1769 'target_ref': pull_request.target_ref_parts,
1756 1770 'source_ref': pull_request.source_ref_parts,
1757 1771 }
1758 1772 if pull_request.last_merge_metadata:
1759 1773 metadata.update(pull_request.last_merge_metadata_parsed)
1760 1774
1761 1775 if not possible and target_ref.type == 'branch':
1762 1776 # NOTE(marcink): case for mercurial multiple heads on branch
1763 1777 heads = target_vcs._heads(target_ref.name)
1764 1778 if len(heads) != 1:
 1765 1779                     heads = ',\n'.join(target_vcs._heads(target_ref.name))
1766 1780 metadata.update({
1767 1781 'heads': heads
1768 1782 })
1769 1783
1770 1784 merge_state = MergeResponse(
1771 1785 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1772 1786
1773 1787 return merge_state
1774 1788
1775 1789 def _refresh_reference(self, reference, vcs_repository):
1776 1790 if reference.type in self.UPDATABLE_REF_TYPES:
1777 1791 name_or_id = reference.name
1778 1792 else:
1779 1793 name_or_id = reference.commit_id
1780 1794
1781 1795 refreshed_commit = vcs_repository.get_commit(name_or_id)
1782 1796 refreshed_reference = Reference(
1783 1797 reference.type, reference.name, refreshed_commit.raw_id)
1784 1798 return refreshed_reference
1785 1799
1786 1800 def _needs_merge_state_refresh(self, pull_request, target_reference):
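        # A refresh is needed when there are no stored revisions, or when either the
        # newest stored source revision or the target reference no longer matches what
        # was recorded during the last merge-state refresh.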
 1787 1801         return not (
1788 1802 pull_request.revisions and
1789 1803 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1790 1804 target_reference.commit_id == pull_request._last_merge_target_rev)
1791 1805
1792 1806 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1793 1807 workspace_id = self._workspace_id(pull_request)
1794 1808 source_vcs = pull_request.source_repo.scm_instance()
1795 1809 repo_id = pull_request.target_repo.repo_id
1796 1810 use_rebase = self._use_rebase_for_merging(pull_request)
1797 1811 close_branch = self._close_branch_before_merging(pull_request)
1798 1812 merge_state = target_vcs.merge(
1799 1813 repo_id, workspace_id,
1800 1814 target_reference, source_vcs, pull_request.source_ref_parts,
1801 1815 dry_run=True, use_rebase=use_rebase,
1802 1816 close_branch=close_branch)
1803 1817
1804 1818 # Do not store the response if there was an unknown error.
1805 1819 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1806 1820 pull_request._last_merge_source_rev = \
1807 1821 pull_request.source_ref_parts.commit_id
1808 1822 pull_request._last_merge_target_rev = target_reference.commit_id
1809 1823 pull_request.last_merge_status = merge_state.failure_reason
1810 1824 pull_request.last_merge_metadata = merge_state.metadata
1811 1825
1812 1826 pull_request.shadow_merge_ref = merge_state.merge_ref
1813 1827 Session().add(pull_request)
1814 1828 Session().commit()
1815 1829
1816 1830 return merge_state
1817 1831
1818 1832 def _workspace_id(self, pull_request):
1819 1833 workspace_id = 'pr-%s' % pull_request.pull_request_id
1820 1834 return workspace_id
1821 1835
1822 1836 def generate_repo_data(self, repo, commit_id=None, branch=None,
1823 1837 bookmark=None, translator=None):
1824 1838 from rhodecode.model.repo import RepoModel
1825 1839
1826 1840 all_refs, selected_ref = \
1827 1841 self._get_repo_pullrequest_sources(
1828 1842 repo.scm_instance(), commit_id=commit_id,
1829 1843 branch=branch, bookmark=bookmark, translator=translator)
1830 1844
1831 1845 refs_select2 = []
1832 1846 for element in all_refs:
1833 1847 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1834 1848 refs_select2.append({'text': element[1], 'children': children})
1835 1849
1836 1850 return {
1837 1851 'user': {
1838 1852 'user_id': repo.user.user_id,
1839 1853 'username': repo.user.username,
1840 1854 'firstname': repo.user.first_name,
1841 1855 'lastname': repo.user.last_name,
1842 1856 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1843 1857 },
1844 1858 'name': repo.repo_name,
1845 1859 'link': RepoModel().get_url(repo),
1846 1860 'description': h.chop_at_smart(repo.description_safe, '\n'),
1847 1861 'refs': {
1848 1862 'all_refs': all_refs,
1849 1863 'selected_ref': selected_ref,
1850 1864 'select2_refs': refs_select2
1851 1865 }
1852 1866 }
1853 1867
1854 1868 def generate_pullrequest_title(self, source, source_ref, target):
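        # Illustrative result (hypothetical names): source='my-repo',
        # source_ref='feature-x', target='upstream-repo'
        # -> u'my-repo#feature-x to upstream-repo'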
1855 1869 return u'{source}#{at_ref} to {target}'.format(
1856 1870 source=source,
1857 1871 at_ref=source_ref,
1858 1872 target=target,
1859 1873 )
1860 1874
1861 1875 def _cleanup_merge_workspace(self, pull_request):
1862 1876 # Merging related cleanup
1863 1877 repo_id = pull_request.target_repo.repo_id
1864 1878 target_scm = pull_request.target_repo.scm_instance()
1865 1879 workspace_id = self._workspace_id(pull_request)
1866 1880
1867 1881 try:
1868 1882 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1869 1883 except NotImplementedError:
1870 1884 pass
1871 1885
1872 1886 def _get_repo_pullrequest_sources(
1873 1887 self, repo, commit_id=None, branch=None, bookmark=None,
1874 1888 translator=None):
1875 1889 """
1876 1890 Return a structure with repo's interesting commits, suitable for
1877 1891 the selectors in pullrequest controller
1878 1892
1879 1893 :param commit_id: a commit that must be in the list somehow
1880 1894 and selected by default
1881 1895 :param branch: a branch that must be in the list and selected
1882 1896 by default - even if closed
 1883 1897         :param bookmark: a bookmark that must be in the list and selected by default
1884 1898 """
1885 1899 _ = translator or get_current_request().translate
1886 1900
1887 1901 commit_id = safe_str(commit_id) if commit_id else None
1888 1902 branch = safe_unicode(branch) if branch else None
1889 1903 bookmark = safe_unicode(bookmark) if bookmark else None
1890 1904
1891 1905 selected = None
1892 1906
1893 1907 # order matters: first source that has commit_id in it will be selected
1894 1908 sources = []
1895 1909 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1896 1910 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1897 1911
1898 1912 if commit_id:
1899 1913 ref_commit = (h.short_id(commit_id), commit_id)
1900 1914 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1901 1915
1902 1916 sources.append(
1903 1917 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1904 1918 )
1905 1919
1906 1920 groups = []
1907 1921
1908 1922 for group_key, ref_list, group_name, match in sources:
1909 1923 group_refs = []
1910 1924 for ref_name, ref_id in ref_list:
1911 1925 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1912 1926 group_refs.append((ref_key, ref_name))
1913 1927
1914 1928 if not selected:
1915 1929 if set([commit_id, match]) & set([ref_id, ref_name]):
1916 1930 selected = ref_key
1917 1931
1918 1932 if group_refs:
1919 1933 groups.append((group_refs, group_name))
1920 1934
1921 1935 if not selected:
1922 1936 ref = commit_id or branch or bookmark
1923 1937 if ref:
1924 1938 raise CommitDoesNotExistError(
1925 1939 u'No commit refs could be found matching: {}'.format(ref))
1926 1940 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1927 1941 selected = u'branch:{}:{}'.format(
1928 1942 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1929 1943 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1930 1944 )
1931 1945 elif repo.commit_ids:
1932 1946 # make the user select in this case
1933 1947 selected = None
1934 1948 else:
1935 1949 raise EmptyRepositoryError()
1936 1950 return groups, selected
1937 1951
1938 1952 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1939 1953 hide_whitespace_changes, diff_context):
1940 1954
1941 1955 return self._get_diff_from_pr_or_version(
1942 1956 source_repo, source_ref_id, target_ref_id,
1943 1957 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1944 1958
1945 1959 def _get_diff_from_pr_or_version(
1946 1960 self, source_repo, source_ref_id, target_ref_id,
1947 1961 hide_whitespace_changes, diff_context):
1948 1962
1949 1963 target_commit = source_repo.get_commit(
1950 1964 commit_id=safe_str(target_ref_id))
1951 1965 source_commit = source_repo.get_commit(
1952 1966 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1953 1967 if isinstance(source_repo, Repository):
1954 1968 vcs_repo = source_repo.scm_instance()
1955 1969 else:
1956 1970 vcs_repo = source_repo
1957 1971
1958 1972 # TODO: johbo: In the context of an update, we cannot reach
1959 1973 # the old commit anymore with our normal mechanisms. It needs
1960 1974 # some sort of special support in the vcs layer to avoid this
1961 1975 # workaround.
1962 1976 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1963 1977 vcs_repo.alias == 'git'):
1964 1978 source_commit.raw_id = safe_str(source_ref_id)
1965 1979
1966 1980 log.debug('calculating diff between '
1967 1981 'source_ref:%s and target_ref:%s for repo `%s`',
1968 1982 target_ref_id, source_ref_id,
1969 1983 safe_unicode(vcs_repo.path))
1970 1984
1971 1985 vcs_diff = vcs_repo.get_diff(
1972 1986 commit1=target_commit, commit2=source_commit,
1973 1987 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1974 1988 return vcs_diff
1975 1989
1976 1990 def _is_merge_enabled(self, pull_request):
1977 1991 return self._get_general_setting(
1978 1992 pull_request, 'rhodecode_pr_merge_enabled')
1979 1993
1980 1994 def _use_rebase_for_merging(self, pull_request):
1981 1995 repo_type = pull_request.target_repo.repo_type
1982 1996 if repo_type == 'hg':
1983 1997 return self._get_general_setting(
1984 1998 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1985 1999 elif repo_type == 'git':
1986 2000 return self._get_general_setting(
1987 2001 pull_request, 'rhodecode_git_use_rebase_for_merging')
1988 2002
1989 2003 return False
1990 2004
1991 2005 def _user_name_for_merging(self, pull_request, user):
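        # If e.g. RC_MERGE_USER_NAME_ATTR=username is set in the environment and the
        # user object has that attribute, it is used for the merge author name;
        # otherwise the default 'short_contact' attribute is used.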
1992 2006 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1993 2007 if env_user_name_attr and hasattr(user, env_user_name_attr):
1994 2008 user_name_attr = env_user_name_attr
1995 2009 else:
1996 2010 user_name_attr = 'short_contact'
1997 2011
1998 2012 user_name = getattr(user, user_name_attr)
1999 2013 return user_name
2000 2014
2001 2015 def _close_branch_before_merging(self, pull_request):
2002 2016 repo_type = pull_request.target_repo.repo_type
2003 2017 if repo_type == 'hg':
2004 2018 return self._get_general_setting(
2005 2019 pull_request, 'rhodecode_hg_close_branch_before_merging')
2006 2020 elif repo_type == 'git':
2007 2021 return self._get_general_setting(
2008 2022 pull_request, 'rhodecode_git_close_branch_before_merging')
2009 2023
2010 2024 return False
2011 2025
2012 2026 def _get_general_setting(self, pull_request, settings_key, default=False):
2013 2027 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2014 2028 settings = settings_model.get_general_settings()
2015 2029 return settings.get(settings_key, default)
2016 2030
2017 2031 def _log_audit_action(self, action, action_data, user, pull_request):
2018 2032 audit_logger.store(
2019 2033 action=action,
2020 2034 action_data=action_data,
2021 2035 user=user,
2022 2036 repo=pull_request.target_repo)
2023 2037
2024 2038 def get_reviewer_functions(self):
2025 2039 """
 2026 2040         Fetches functions for validating and fetching the default reviewers.
 2027 2041         If available we use the EE package, else we fall back to the CE
 2028 2042         package functions
2029 2043 """
2030 2044 try:
2031 2045 from rc_reviewers.utils import get_default_reviewers_data
2032 2046 from rc_reviewers.utils import validate_default_reviewers
2033 2047 from rc_reviewers.utils import validate_observers
2034 2048 except ImportError:
2035 2049 from rhodecode.apps.repository.utils import get_default_reviewers_data
2036 2050 from rhodecode.apps.repository.utils import validate_default_reviewers
2037 2051 from rhodecode.apps.repository.utils import validate_observers
2038 2052
2039 2053 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2040 2054
2041 2055
2042 2056 class MergeCheck(object):
2043 2057 """
 2044 2058     Performs merge checks and returns a check object which stores information
 2045 2059     about merge errors and merge conditions
2046 2060 """
2047 2061 TODO_CHECK = 'todo'
2048 2062 PERM_CHECK = 'perm'
2049 2063 REVIEW_CHECK = 'review'
2050 2064 MERGE_CHECK = 'merge'
2051 2065 WIP_CHECK = 'wip'
2052 2066
2053 2067 def __init__(self):
2054 2068 self.review_status = None
2055 2069 self.merge_possible = None
2056 2070 self.merge_msg = ''
2057 2071 self.merge_response = None
2058 2072 self.failed = None
2059 2073 self.errors = []
2060 2074 self.error_details = OrderedDict()
2061 2075 self.source_commit = AttributeDict()
2062 2076 self.target_commit = AttributeDict()
2063 2077 self.reviewers_count = 0
2064 2078 self.observers_count = 0
2065 2079
2066 2080 def __repr__(self):
2067 2081 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2068 2082 self.merge_possible, self.failed, self.errors)
2069 2083
2070 2084 def push_error(self, error_type, message, error_key, details):
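        # Example from validate() below: push_error('warning', msg, cls.REVIEW_CHECK,
        # review_status) marks the check as failed and records the details under
        # error_details['review'].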
2071 2085 self.failed = True
2072 2086 self.errors.append([error_type, message])
2073 2087 self.error_details[error_key] = dict(
2074 2088 details=details,
2075 2089 error_type=error_type,
2076 2090 message=message
2077 2091 )
2078 2092
2079 2093 @classmethod
2080 2094 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2081 2095 force_shadow_repo_refresh=False):
2082 2096 _ = translator
2083 2097 merge_check = cls()
2084 2098
2085 2099 # title has WIP:
2086 2100 if pull_request.work_in_progress:
2087 2101 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2088 2102
2089 2103 msg = _('WIP marker in title prevents from accidental merge.')
2090 2104 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2091 2105 if fail_early:
2092 2106 return merge_check
2093 2107
2094 2108 # permissions to merge
2095 2109 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2096 2110 if not user_allowed_to_merge:
 2097 2111             log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
2098 2112
2099 2113 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2100 2114 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2101 2115 if fail_early:
2102 2116 return merge_check
2103 2117
2104 2118 # permission to merge into the target branch
2105 2119 target_commit_id = pull_request.target_ref_parts.commit_id
2106 2120 if pull_request.target_ref_parts.type == 'branch':
2107 2121 branch_name = pull_request.target_ref_parts.name
2108 2122 else:
2109 2123 # for mercurial we can always figure out the branch from the commit
2110 2124 # in case of bookmark
2111 2125 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2112 2126 branch_name = target_commit.branch
2113 2127
2114 2128 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2115 2129 pull_request.target_repo.repo_name, branch_name)
2116 2130 if branch_perm and branch_perm == 'branch.none':
2117 2131 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2118 2132 branch_name, rule)
2119 2133 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2120 2134 if fail_early:
2121 2135 return merge_check
2122 2136
2123 2137 # review status, must be always present
2124 2138 review_status = pull_request.calculated_review_status()
2125 2139 merge_check.review_status = review_status
2126 2140 merge_check.reviewers_count = pull_request.reviewers_count
2127 2141 merge_check.observers_count = pull_request.observers_count
2128 2142
2129 2143 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2130 2144 if not status_approved and merge_check.reviewers_count:
2131 2145 log.debug("MergeCheck: cannot merge, approval is pending.")
2132 2146 msg = _('Pull request reviewer approval is pending.')
2133 2147
2134 2148 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2135 2149
2136 2150 if fail_early:
2137 2151 return merge_check
2138 2152
2139 2153 # left over TODOs
2140 2154 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2141 2155 if todos:
2142 2156 log.debug("MergeCheck: cannot merge, {} "
2143 2157 "unresolved TODOs left.".format(len(todos)))
2144 2158
2145 2159 if len(todos) == 1:
2146 2160 msg = _('Cannot merge, {} TODO still not resolved.').format(
2147 2161 len(todos))
2148 2162 else:
2149 2163 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2150 2164 len(todos))
2151 2165
2152 2166 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2153 2167
2154 2168 if fail_early:
2155 2169 return merge_check
2156 2170
2157 2171 # merge possible, here is the filesystem simulation + shadow repo
2158 2172 merge_response, merge_status, msg = PullRequestModel().merge_status(
2159 2173 pull_request, translator=translator,
2160 2174 force_shadow_repo_refresh=force_shadow_repo_refresh)
2161 2175
2162 2176 merge_check.merge_possible = merge_status
2163 2177 merge_check.merge_msg = msg
2164 2178 merge_check.merge_response = merge_response
2165 2179
2166 2180 source_ref_id = pull_request.source_ref_parts.commit_id
2167 2181 target_ref_id = pull_request.target_ref_parts.commit_id
2168 2182
2169 2183 try:
2170 2184 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2171 2185 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2172 2186 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2173 2187 merge_check.source_commit.current_raw_id = source_commit.raw_id
2174 2188 merge_check.source_commit.previous_raw_id = source_ref_id
2175 2189
2176 2190 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2177 2191 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2178 2192 merge_check.target_commit.current_raw_id = target_commit.raw_id
2179 2193 merge_check.target_commit.previous_raw_id = target_ref_id
2180 2194 except (SourceRefMissing, TargetRefMissing):
2181 2195 pass
2182 2196
2183 2197 if not merge_status:
2184 2198 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2185 2199 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2186 2200
2187 2201 if fail_early:
2188 2202 return merge_check
2189 2203
2190 2204 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2191 2205 return merge_check
2192 2206
2193 2207 @classmethod
2194 2208 def get_merge_conditions(cls, pull_request, translator):
2195 2209 _ = translator
2196 2210 merge_details = {}
2197 2211
2198 2212 model = PullRequestModel()
2199 2213 use_rebase = model._use_rebase_for_merging(pull_request)
2200 2214
2201 2215 if use_rebase:
2202 2216 merge_details['merge_strategy'] = dict(
2203 2217 details={},
2204 2218 message=_('Merge strategy: rebase')
2205 2219 )
2206 2220 else:
2207 2221 merge_details['merge_strategy'] = dict(
2208 2222 details={},
2209 2223 message=_('Merge strategy: explicit merge commit')
2210 2224 )
2211 2225
2212 2226 close_branch = model._close_branch_before_merging(pull_request)
2213 2227 if close_branch:
2214 2228 repo_type = pull_request.target_repo.repo_type
2215 2229 close_msg = ''
2216 2230 if repo_type == 'hg':
2217 2231 close_msg = _('Source branch will be closed before the merge.')
2218 2232 elif repo_type == 'git':
2219 2233 close_msg = _('Source branch will be deleted after the merge.')
2220 2234
2221 2235 merge_details['close_branch'] = dict(
2222 2236 details={},
2223 2237 message=close_msg
2224 2238 )
2225 2239
2226 2240 return merge_details
2227 2241
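For orientation, a hedged illustration (not taken from the source above) of the mapping get_merge_conditions() can return, here for a Mercurial target repository with rebase merging disabled and branch closing enabled; the messages are the translated strings built in the method above:

    {
        'merge_strategy': {
            'details': {},
            'message': u'Merge strategy: explicit merge commit',
        },
        'close_branch': {
            'details': {},
            'message': u'Source branch will be closed before the merge.',
        },
    }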
2228 2242
2229 2243 ChangeTuple = collections.namedtuple(
2230 2244 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2231 2245
2232 2246 FileChangeTuple = collections.namedtuple(
2233 2247 'FileChangeTuple', ['added', 'modified', 'removed'])
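MergeCheck.validate() is how callers run all of the checks above in one pass. A minimal usage sketch follows; it is not part of the changeset shown here, the helper name _check_and_report is hypothetical, and request.translate plus h.flash are assumed to be available the way they are in other RhodeCode views:

    from rhodecode.lib import helpers as h
    from rhodecode.model.pull_request import MergeCheck


    def _check_and_report(request, pull_request, auth_user):
        # hypothetical helper sketching how the checks above are consumed
        _ = request.translate
        merge_check = MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=_,
            force_shadow_repo_refresh=False)

        if merge_check.failed:
            # each push_error() call appends [severity, message] to .errors
            for severity, message in merge_check.errors:
                h.flash(message, category=severity)
            return False

        # True only when the shadow-repo merge simulation succeeded
        return merge_check.merge_possible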
@@ -1,80 +1,88 b''
1 1 ## Changesets table !
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3
4 4 %if c.ancestor:
5 5 <div class="ancestor">${_('Compare was calculated based on this common ancestor commit')}:
6 6 <a href="${h.route_path('repo_commit', repo_name=c.repo_name, commit_id=c.ancestor)}">${h.short_id(c.ancestor)}</a>
7 7 <input id="common_ancestor" type="hidden" name="common_ancestor" value="${c.ancestor}">
8 8 </div>
9 9 %endif
10 10
11 11 <div class="container">
12 12 <input type="hidden" name="__start__" value="revisions:sequence">
13 13 <table class="rctable compare_view_commits">
14 14 <tr>
15 % if hasattr(c, 'commit_versions'):
16 <th>ver</th>
17 % endif
15 18 <th>${_('Time')}</th>
16 19 <th>${_('Author')}</th>
17 20 <th>${_('Commit')}</th>
18 21 <th></th>
19 22 <th>${_('Description')}</th>
20 23 </tr>
21 24 ## to speed up lookups cache some functions before the loop
22 25 <%
23 26 active_patterns = h.get_active_pattern_entries(c.repo_name)
24 27 urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns, issues_container=getattr(c, 'referenced_commit_issues', None))
25 28 %>
26 29
27 30 %for commit in c.commit_ranges:
28 31 <tr id="row-${commit.raw_id}"
29 32 commit_id="${commit.raw_id}"
30 33 class="compare_select"
31 34 style="${'display: none' if c.collapse_all_commits else ''}"
32 35 >
36 % if hasattr(c, 'commit_versions'):
37 <td class="tooltip" title="${_('Pull request version in which this commit was introduced')}">
38 <code>${('v{}'.format(c.commit_versions[commit.raw_id][0]) if c.commit_versions[commit.raw_id] else 'latest')}</code>
39 </td>
40 % endif
33 41 <td class="td-time">
34 42 ${h.age_component(commit.date)}
35 43 </td>
36 44 <td class="td-user">
37 45 ${base.gravatar_with_user(commit.author, 16, tooltip=True)}
38 46 </td>
39 47 <td class="td-hash">
40 48 <code>
41 49 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
42 50 r${commit.idx}:${h.short_id(commit.raw_id)}
43 51 </a>
44 52 ${h.hidden('revisions',commit.raw_id)}
45 53 </code>
46 54 </td>
47 55 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
48 56 <i class="icon-expand-linked"></i>
49 57 </td>
50 58 <td class="mid td-description">
51 59 <div class="log-container truncate-wrap">
52 60 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${urlify_commit_message(commit.message, c.repo_name)}</div>
53 61 </div>
54 62 </td>
55 63 </tr>
56 64 %endfor
57 65 <tr class="compare_select_hidden" style="${('' if c.collapse_all_commits else 'display: none')}">
58 66 <td colspan="5">
59 67 ${_ungettext('{} commit hidden, click expand to show it.', '{} commits hidden, click expand to show them.', len(c.commit_ranges)).format(len(c.commit_ranges))}
60 68 </td>
61 69 </tr>
62 70 % if not c.commit_ranges:
63 71 <tr class="compare_select">
64 72 <td colspan="5">
65 73 ${_('No commits in this compare')}
66 74 </td>
67 75 </tr>
68 76 % endif
69 77 </table>
70 78 <input type="hidden" name="__end__" value="revisions:sequence">
71 79
72 80 </div>
73 81
74 82 <script>
75 83 commitsController = new CommitsController();
76 84 $('.compare_select').on('click',function(e){
77 85 var cid = $(this).attr('commit_id');
78 86 $('#row-'+cid).toggleClass('hl', !$('#row-'+cid).hasClass('hl'));
79 87 });
80 88 </script>
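The template above reads c.commit_versions as a mapping from commit id to the pull-request versions that contained it, falling back to 'latest' for commits that only exist in the newest version. Below is a hedged sketch of how such a mapping could be assembled in the view before rendering; the helper name build_commit_versions is hypothetical, and the versions/revisions accessors are assumptions about the pull-request version objects:

    import collections


    def build_commit_versions(pull_request_versions):
        # pull_request_versions: historical PR versions in creation order,
        # each assumed to expose a `revisions` list of commit ids
        commit_versions = collections.defaultdict(list)
        for version_number, pr_version in enumerate(pull_request_versions, start=1):
            for raw_id in pr_version.revisions:
                commit_versions[raw_id].append(version_number)
        # commits missing from every historical version resolve to an empty
        # list, which the template renders as 'latest'
        return commit_versions

    # in the view, before rendering (accessor name is an assumption):
    # c.commit_versions = build_commit_versions(pull_request.versions)

Using a defaultdict keeps the template's direct indexing, c.commit_versions[commit.raw_id], safe for commits that never appeared in an older version.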