pull-requests: added quick filter to grid view.
marcink
r4055:8b8d49d6 default
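For orientation before reading the diff: this commit threads a free-text quick-filter term, search_q, from the grid request (obtained via _extract_ordering) into every PullRequestModel listing and count call (get_all, get_awaiting_review, get_awaiting_my_review and their count_* counterparts). The model-side changes are not part of this file; the snippet below is a minimal, hypothetical sketch of how such a term is typically applied to the query. The helper name _apply_search_filter and the filtering on title/description are illustrative assumptions, not the actual RhodeCode implementation.

    # Hypothetical sketch -- not the actual PullRequestModel code.
    # Assumes the same imports this module already uses and a SQLAlchemy
    # query over PullRequest; the filtered columns are an assumption.
    from rhodecode.lib.utils2 import safe_unicode
    from rhodecode.model.db import or_, PullRequest

    def _apply_search_filter(q, search_q):
        """Narrow a PullRequest query by the quick-filter term, if given."""
        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.filter(or_(
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression)))
        return q

In the handler changes below, search_q is simply forwarded alongside the existing source/statuses/opened_by arguments to both the listing and the count queries, so the grid's recordsTotal and recordsFiltered values stay consistent with the filtered result set.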
@@ -1,1470 +1,1470 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.ext_json import json
38 38 from rhodecode.lib.auth import (
39 39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 40 NotAnonymous, CSRFRequired)
41 41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 44 RepositoryRequirementError, EmptyRepositoryError)
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 48 ChangesetComment, ChangesetStatus, Repository)
49 49 from rhodecode.model.forms import PullRequestForm
50 50 from rhodecode.model.meta import Session
51 51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 52 from rhodecode.model.scm import ScmModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58 58
59 59 def load_default_context(self):
60 60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 63 # backward compat.: for OLD PRs we use a plain renderer
64 64 c.renderer = 'plain'
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, source=source, opened_by=opened_by,
79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, source=source, statuses=statuses,
83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, source=source, opened_by=opened_by,
87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 repo_name, source=source, opened_by=opened_by,
96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, source=source, statuses=statuses,
100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.target_repo.repo_name),
112 112 'name_raw': pr.pull_request_id,
113 113 'status': _render('pullrequest_status',
114 114 pr.calculated_review_status()),
115 115 'title': _render('pullrequest_title', pr.title, pr.description),
116 116 'description': h.escape(pr.description),
117 117 'updated_on': _render('pullrequest_updated_on',
118 118 h.datetime_to_time(pr.updated_on)),
119 119 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 120 'created_on': _render('pullrequest_updated_on',
121 121 h.datetime_to_time(pr.created_on)),
122 122 'created_on_raw': h.datetime_to_time(pr.created_on),
123 123 'state': pr.pull_request_state,
124 124 'author': _render('pullrequest_author',
125 125 pr.author.full_contact, ),
126 126 'author_raw': pr.author.full_name,
127 127 'comments': _render('pullrequest_comments', len(comments)),
128 128 'comments_raw': len(comments),
129 129 'closed': pr.is_closed(),
130 130 })
131 131
132 132 data = ({
133 133 'draw': draw,
134 134 'data': data,
135 135 'recordsTotal': pull_requests_total_count,
136 136 'recordsFiltered': pull_requests_total_count,
137 137 })
138 138 return data
139 139
140 140 @LoginRequired()
141 141 @HasRepoPermissionAnyDecorator(
142 142 'repository.read', 'repository.write', 'repository.admin')
143 143 @view_config(
144 144 route_name='pullrequest_show_all', request_method='GET',
145 145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 146 def pull_request_list(self):
147 147 c = self.load_default_context()
148 148
149 149 req_get = self.request.GET
150 150 c.source = str2bool(req_get.get('source'))
151 151 c.closed = str2bool(req_get.get('closed'))
152 152 c.my = str2bool(req_get.get('my'))
153 153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 155
156 156 c.active = 'open'
157 157 if c.my:
158 158 c.active = 'my'
159 159 if c.closed:
160 160 c.active = 'closed'
161 161 if c.awaiting_review and not c.source:
162 162 c.active = 'awaiting'
163 163 if c.source and not c.awaiting_review:
164 164 c.active = 'source'
165 165 if c.awaiting_my_review:
166 166 c.active = 'awaiting_my'
167 167
168 168 return self._get_template_context(c)
169 169
170 170 @LoginRequired()
171 171 @HasRepoPermissionAnyDecorator(
172 172 'repository.read', 'repository.write', 'repository.admin')
173 173 @view_config(
174 174 route_name='pullrequest_show_all_data', request_method='GET',
175 175 renderer='json_ext', xhr=True)
176 176 def pull_request_list_data(self):
177 177 self.load_default_context()
178 178
179 179 # additional filters
180 180 req_get = self.request.GET
181 181 source = str2bool(req_get.get('source'))
182 182 closed = str2bool(req_get.get('closed'))
183 183 my = str2bool(req_get.get('my'))
184 184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 186
187 187 filter_type = 'awaiting_review' if awaiting_review \
188 188 else 'awaiting_my_review' if awaiting_my_review \
189 189 else None
190 190
191 191 opened_by = None
192 192 if my:
193 193 opened_by = [self._rhodecode_user.user_id]
194 194
195 195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 196 if closed:
197 197 statuses = [PullRequest.STATUS_CLOSED]
198 198
199 199 data = self._get_pull_requests_list(
200 200 repo_name=self.db_repo_name, source=source,
201 201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 202
203 203 return data
204 204
205 205 def _is_diff_cache_enabled(self, target_repo):
206 206 caching_enabled = self._get_general_setting(
207 207 target_repo, 'rhodecode_diff_cache')
208 208 log.debug('Diff caching enabled: %s', caching_enabled)
209 209 return caching_enabled
210 210
211 211 def _get_diffset(self, source_repo_name, source_repo,
212 212 source_ref_id, target_ref_id,
213 213 target_commit, source_commit, diff_limit, file_limit,
214 214 fulldiff, hide_whitespace_changes, diff_context):
215 215
216 216 vcs_diff = PullRequestModel().get_diff(
217 217 source_repo, source_ref_id, target_ref_id,
218 218 hide_whitespace_changes, diff_context)
219 219
220 220 diff_processor = diffs.DiffProcessor(
221 221 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 222 file_limit=file_limit, show_full_diff=fulldiff)
223 223
224 224 _parsed = diff_processor.prepare()
225 225
226 226 diffset = codeblocks.DiffSet(
227 227 repo_name=self.db_repo_name,
228 228 source_repo_name=source_repo_name,
229 229 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 230 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 231 )
232 232 diffset = self.path_filter.render_patchset_filtered(
233 233 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234 234
235 235 return diffset
236 236
237 237 def _get_range_diffset(self, source_scm, source_repo,
238 238 commit1, commit2, diff_limit, file_limit,
239 239 fulldiff, hide_whitespace_changes, diff_context):
240 240 vcs_diff = source_scm.get_diff(
241 241 commit1, commit2,
242 242 ignore_whitespace=hide_whitespace_changes,
243 243 context=diff_context)
244 244
245 245 diff_processor = diffs.DiffProcessor(
246 246 vcs_diff, format='newdiff', diff_limit=diff_limit,
247 247 file_limit=file_limit, show_full_diff=fulldiff)
248 248
249 249 _parsed = diff_processor.prepare()
250 250
251 251 diffset = codeblocks.DiffSet(
252 252 repo_name=source_repo.repo_name,
253 253 source_node_getter=codeblocks.diffset_node_getter(commit1),
254 254 target_node_getter=codeblocks.diffset_node_getter(commit2))
255 255
256 256 diffset = self.path_filter.render_patchset_filtered(
257 257 diffset, _parsed, commit1.raw_id, commit2.raw_id)
258 258
259 259 return diffset
260 260
261 261 @LoginRequired()
262 262 @HasRepoPermissionAnyDecorator(
263 263 'repository.read', 'repository.write', 'repository.admin')
264 264 @view_config(
265 265 route_name='pullrequest_show', request_method='GET',
266 266 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 267 def pull_request_show(self):
268 268 _ = self.request.translate
269 269 c = self.load_default_context()
270 270
271 271 pull_request = PullRequest.get_or_404(
272 272 self.request.matchdict['pull_request_id'])
273 273 pull_request_id = pull_request.pull_request_id
274 274
275 275 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
276 276 log.debug('show: forbidden because pull request is in state %s',
277 277 pull_request.pull_request_state)
278 278 msg = _(u'Cannot show pull requests in state other than `{}`. '
279 279 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
280 280 pull_request.pull_request_state)
281 281 h.flash(msg, category='error')
282 282 raise HTTPFound(h.route_path('pullrequest_show_all',
283 283 repo_name=self.db_repo_name))
284 284
285 285 version = self.request.GET.get('version')
286 286 from_version = self.request.GET.get('from_version') or version
287 287 merge_checks = self.request.GET.get('merge_checks')
288 288 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
289 289
290 290 # fetch global flags of ignore ws or context lines
291 291 diff_context = diffs.get_diff_context(self.request)
292 292 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
293 293
294 294 force_refresh = str2bool(self.request.GET.get('force_refresh'))
295 295
296 296 (pull_request_latest,
297 297 pull_request_at_ver,
298 298 pull_request_display_obj,
299 299 at_version) = PullRequestModel().get_pr_version(
300 300 pull_request_id, version=version)
301 301 pr_closed = pull_request_latest.is_closed()
302 302
303 303 if pr_closed and (version or from_version):
304 304 # do not allow browsing versions
305 305 raise HTTPFound(h.route_path(
306 306 'pullrequest_show', repo_name=self.db_repo_name,
307 307 pull_request_id=pull_request_id))
308 308
309 309 versions = pull_request_display_obj.versions()
310 310 # used to store per-commit range diffs
311 311 c.changes = collections.OrderedDict()
312 312 c.range_diff_on = self.request.GET.get('range-diff') == "1"
313 313
314 314 c.at_version = at_version
315 315 c.at_version_num = (at_version
316 316 if at_version and at_version != 'latest'
317 317 else None)
318 318 c.at_version_pos = ChangesetComment.get_index_from_version(
319 319 c.at_version_num, versions)
320 320
321 321 (prev_pull_request_latest,
322 322 prev_pull_request_at_ver,
323 323 prev_pull_request_display_obj,
324 324 prev_at_version) = PullRequestModel().get_pr_version(
325 325 pull_request_id, version=from_version)
326 326
327 327 c.from_version = prev_at_version
328 328 c.from_version_num = (prev_at_version
329 329 if prev_at_version and prev_at_version != 'latest'
330 330 else None)
331 331 c.from_version_pos = ChangesetComment.get_index_from_version(
332 332 c.from_version_num, versions)
333 333
334 334 # define if we're in COMPARE mode or VIEW at version mode
335 335 compare = at_version != prev_at_version
336 336
337 337 # the repo_name this pull request was opened against,
338 338 # i.e. the target_repo must match
339 339 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
340 340 raise HTTPNotFound()
341 341
342 342 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
343 343 pull_request_at_ver)
344 344
345 345 c.pull_request = pull_request_display_obj
346 346 c.renderer = pull_request_at_ver.description_renderer or c.renderer
347 347 c.pull_request_latest = pull_request_latest
348 348
349 349 if compare or (at_version and not at_version == 'latest'):
350 350 c.allowed_to_change_status = False
351 351 c.allowed_to_update = False
352 352 c.allowed_to_merge = False
353 353 c.allowed_to_delete = False
354 354 c.allowed_to_comment = False
355 355 c.allowed_to_close = False
356 356 else:
357 357 can_change_status = PullRequestModel().check_user_change_status(
358 358 pull_request_at_ver, self._rhodecode_user)
359 359 c.allowed_to_change_status = can_change_status and not pr_closed
360 360
361 361 c.allowed_to_update = PullRequestModel().check_user_update(
362 362 pull_request_latest, self._rhodecode_user) and not pr_closed
363 363 c.allowed_to_merge = PullRequestModel().check_user_merge(
364 364 pull_request_latest, self._rhodecode_user) and not pr_closed
365 365 c.allowed_to_delete = PullRequestModel().check_user_delete(
366 366 pull_request_latest, self._rhodecode_user) and not pr_closed
367 367 c.allowed_to_comment = not pr_closed
368 368 c.allowed_to_close = c.allowed_to_merge and not pr_closed
369 369
370 370 c.forbid_adding_reviewers = False
371 371 c.forbid_author_to_review = False
372 372 c.forbid_commit_author_to_review = False
373 373
374 374 if pull_request_latest.reviewer_data and \
375 375 'rules' in pull_request_latest.reviewer_data:
376 376 rules = pull_request_latest.reviewer_data['rules'] or {}
377 377 try:
378 378 c.forbid_adding_reviewers = rules.get(
379 379 'forbid_adding_reviewers')
380 380 c.forbid_author_to_review = rules.get(
381 381 'forbid_author_to_review')
382 382 c.forbid_commit_author_to_review = rules.get(
383 383 'forbid_commit_author_to_review')
384 384 except Exception:
385 385 pass
386 386
387 387 # check merge capabilities
388 388 _merge_check = MergeCheck.validate(
389 389 pull_request_latest, auth_user=self._rhodecode_user,
390 390 translator=self.request.translate,
391 391 force_shadow_repo_refresh=force_refresh)
392 392 c.pr_merge_errors = _merge_check.error_details
393 393 c.pr_merge_possible = not _merge_check.failed
394 394 c.pr_merge_message = _merge_check.merge_msg
395 395
396 396 c.pr_merge_info = MergeCheck.get_merge_conditions(
397 397 pull_request_latest, translator=self.request.translate)
398 398
399 399 c.pull_request_review_status = _merge_check.review_status
400 400 if merge_checks:
401 401 self.request.override_renderer = \
402 402 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
403 403 return self._get_template_context(c)
404 404
405 405 comments_model = CommentsModel()
406 406
407 407 # reviewers and statuses
408 408 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
409 409 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
410 410
411 411 # GENERAL COMMENTS with versions #
412 412 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
413 413 q = q.order_by(ChangesetComment.comment_id.asc())
414 414 general_comments = q
415 415
416 416 # pick comments we want to render at current version
417 417 c.comment_versions = comments_model.aggregate_comments(
418 418 general_comments, versions, c.at_version_num)
419 419 c.comments = c.comment_versions[c.at_version_num]['until']
420 420
421 421 # INLINE COMMENTS with versions #
422 422 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
423 423 q = q.order_by(ChangesetComment.comment_id.asc())
424 424 inline_comments = q
425 425
426 426 c.inline_versions = comments_model.aggregate_comments(
427 427 inline_comments, versions, c.at_version_num, inline=True)
428 428
429 429 # TODOs
430 430 c.unresolved_comments = CommentsModel() \
431 431 .get_pull_request_unresolved_todos(pull_request)
432 432 c.resolved_comments = CommentsModel() \
433 433 .get_pull_request_resolved_todos(pull_request)
434 434
435 435 # inject latest version
436 436 latest_ver = PullRequest.get_pr_display_object(
437 437 pull_request_latest, pull_request_latest)
438 438
439 439 c.versions = versions + [latest_ver]
440 440
441 441 # if we use version, then do not show later comments
442 442 # than current version
443 443 display_inline_comments = collections.defaultdict(
444 444 lambda: collections.defaultdict(list))
445 445 for co in inline_comments:
446 446 if c.at_version_num:
447 447 # pick comments up to the given version, so we don't
448 448 # render comments from a higher version
449 449 should_render = co.pull_request_version_id and \
450 450 co.pull_request_version_id <= c.at_version_num
451 451 else:
452 452 # showing all, for 'latest'
453 453 should_render = True
454 454
455 455 if should_render:
456 456 display_inline_comments[co.f_path][co.line_no].append(co)
457 457
458 458 # load diff data into template context, if we use compare mode then
459 459 # diff is calculated based on changes between versions of PR
460 460
461 461 source_repo = pull_request_at_ver.source_repo
462 462 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
463 463
464 464 target_repo = pull_request_at_ver.target_repo
465 465 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
466 466
467 467 if compare:
468 468 # in compare switch the diff base to latest commit from prev version
469 469 target_ref_id = prev_pull_request_display_obj.revisions[0]
470 470
471 471 # despite opening commits for bookmarks/branches/tags, we always
472 472 # convert this to rev to prevent changes after bookmark or branch change
473 473 c.source_ref_type = 'rev'
474 474 c.source_ref = source_ref_id
475 475
476 476 c.target_ref_type = 'rev'
477 477 c.target_ref = target_ref_id
478 478
479 479 c.source_repo = source_repo
480 480 c.target_repo = target_repo
481 481
482 482 c.commit_ranges = []
483 483 source_commit = EmptyCommit()
484 484 target_commit = EmptyCommit()
485 485 c.missing_requirements = False
486 486
487 487 source_scm = source_repo.scm_instance()
488 488 target_scm = target_repo.scm_instance()
489 489
490 490 shadow_scm = None
491 491 try:
492 492 shadow_scm = pull_request_latest.get_shadow_repo()
493 493 except Exception:
494 494 log.debug('Failed to get shadow repo', exc_info=True)
495 495 # try first the existing source_repo, and then shadow
496 496 # repo if we can obtain one
497 497 commits_source_repo = source_scm or shadow_scm
498 498
499 499 c.commits_source_repo = commits_source_repo
500 500 c.ancestor = None # set it to None, to hide it from PR view
501 501
502 502 # empty version means latest, so we keep this to prevent
503 503 # double caching
504 504 version_normalized = version or 'latest'
505 505 from_version_normalized = from_version or 'latest'
506 506
507 507 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
508 508 cache_file_path = diff_cache_exist(
509 509 cache_path, 'pull_request', pull_request_id, version_normalized,
510 510 from_version_normalized, source_ref_id, target_ref_id,
511 511 hide_whitespace_changes, diff_context, c.fulldiff)
512 512
513 513 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
514 514 force_recache = self.get_recache_flag()
515 515
516 516 cached_diff = None
517 517 if caching_enabled:
518 518 cached_diff = load_cached_diff(cache_file_path)
519 519
520 520 has_proper_commit_cache = (
521 521 cached_diff and cached_diff.get('commits')
522 522 and len(cached_diff.get('commits', [])) == 5
523 523 and cached_diff.get('commits')[0]
524 524 and cached_diff.get('commits')[3])
525 525
526 526 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
527 527 diff_commit_cache = \
528 528 (ancestor_commit, commit_cache, missing_requirements,
529 529 source_commit, target_commit) = cached_diff['commits']
530 530 else:
531 531 diff_commit_cache = \
532 532 (ancestor_commit, commit_cache, missing_requirements,
533 533 source_commit, target_commit) = self.get_commits(
534 534 commits_source_repo,
535 535 pull_request_at_ver,
536 536 source_commit,
537 537 source_ref_id,
538 538 source_scm,
539 539 target_commit,
540 540 target_ref_id,
541 541 target_scm)
542 542
543 543 # register our commit range
544 544 for comm in commit_cache.values():
545 545 c.commit_ranges.append(comm)
546 546
547 547 c.missing_requirements = missing_requirements
548 548 c.ancestor_commit = ancestor_commit
549 549 c.statuses = source_repo.statuses(
550 550 [x.raw_id for x in c.commit_ranges])
551 551
552 552 # auto collapse if we have more than limit
553 553 collapse_limit = diffs.DiffProcessor._collapse_commits_over
554 554 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
555 555 c.compare_mode = compare
556 556
557 557 # diff_limit is the old behavior, will cut off the whole diff
558 558 # if the limit is applied otherwise will just hide the
559 559 # big files from the front-end
560 560 diff_limit = c.visual.cut_off_limit_diff
561 561 file_limit = c.visual.cut_off_limit_file
562 562
563 563 c.missing_commits = False
564 564 if (c.missing_requirements
565 565 or isinstance(source_commit, EmptyCommit)
566 566 or source_commit == target_commit):
567 567
568 568 c.missing_commits = True
569 569 else:
570 570 c.inline_comments = display_inline_comments
571 571
572 572 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
573 573 if not force_recache and has_proper_diff_cache:
574 574 c.diffset = cached_diff['diff']
575 575 (ancestor_commit, commit_cache, missing_requirements,
576 576 source_commit, target_commit) = cached_diff['commits']
577 577 else:
578 578 c.diffset = self._get_diffset(
579 579 c.source_repo.repo_name, commits_source_repo,
580 580 source_ref_id, target_ref_id,
581 581 target_commit, source_commit,
582 582 diff_limit, file_limit, c.fulldiff,
583 583 hide_whitespace_changes, diff_context)
584 584
585 585 # save cached diff
586 586 if caching_enabled:
587 587 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
588 588
589 589 c.limited_diff = c.diffset.limited_diff
590 590
591 591 # calculate removed files that are bound to comments
592 592 comment_deleted_files = [
593 593 fname for fname in display_inline_comments
594 594 if fname not in c.diffset.file_stats]
595 595
596 596 c.deleted_files_comments = collections.defaultdict(dict)
597 597 for fname, per_line_comments in display_inline_comments.items():
598 598 if fname in comment_deleted_files:
599 599 c.deleted_files_comments[fname]['stats'] = 0
600 600 c.deleted_files_comments[fname]['comments'] = list()
601 601 for lno, comments in per_line_comments.items():
602 602 c.deleted_files_comments[fname]['comments'].extend(comments)
603 603
604 604 # maybe calculate the range diff
605 605 if c.range_diff_on:
606 606 # TODO(marcink): set whitespace/context
607 607 context_lcl = 3
608 608 ign_whitespace_lcl = False
609 609
610 610 for commit in c.commit_ranges:
611 611 commit2 = commit
612 612 commit1 = commit.first_parent
613 613
614 614 range_diff_cache_file_path = diff_cache_exist(
615 615 cache_path, 'diff', commit.raw_id,
616 616 ign_whitespace_lcl, context_lcl, c.fulldiff)
617 617
618 618 cached_diff = None
619 619 if caching_enabled:
620 620 cached_diff = load_cached_diff(range_diff_cache_file_path)
621 621
622 622 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
623 623 if not force_recache and has_proper_diff_cache:
624 624 diffset = cached_diff['diff']
625 625 else:
626 626 diffset = self._get_range_diffset(
627 627 source_scm, source_repo,
628 628 commit1, commit2, diff_limit, file_limit,
629 629 c.fulldiff, ign_whitespace_lcl, context_lcl
630 630 )
631 631
632 632 # save cached diff
633 633 if caching_enabled:
634 634 cache_diff(range_diff_cache_file_path, diffset, None)
635 635
636 636 c.changes[commit.raw_id] = diffset
637 637
638 638 # this is a hack to properly display links, when creating PR, the
639 639 # compare view and others uses different notation, and
640 640 # compare_commits.mako renders links based on the target_repo.
641 641 # We need to swap that here to generate it properly on the html side
642 642 c.target_repo = c.source_repo
643 643
644 644 c.commit_statuses = ChangesetStatus.STATUSES
645 645
646 646 c.show_version_changes = not pr_closed
647 647 if c.show_version_changes:
648 648 cur_obj = pull_request_at_ver
649 649 prev_obj = prev_pull_request_at_ver
650 650
651 651 old_commit_ids = prev_obj.revisions
652 652 new_commit_ids = cur_obj.revisions
653 653 commit_changes = PullRequestModel()._calculate_commit_id_changes(
654 654 old_commit_ids, new_commit_ids)
655 655 c.commit_changes_summary = commit_changes
656 656
657 657 # calculate the diff for commits between versions
658 658 c.commit_changes = []
659 659 mark = lambda cs, fw: list(
660 660 h.itertools.izip_longest([], cs, fillvalue=fw))
661 661 for c_type, raw_id in mark(commit_changes.added, 'a') \
662 662 + mark(commit_changes.removed, 'r') \
663 663 + mark(commit_changes.common, 'c'):
664 664
665 665 if raw_id in commit_cache:
666 666 commit = commit_cache[raw_id]
667 667 else:
668 668 try:
669 669 commit = commits_source_repo.get_commit(raw_id)
670 670 except CommitDoesNotExistError:
671 671 # if extraction fails, still use a "dummy" commit
672 672 # for display in the commit diff
673 673 commit = h.AttributeDict(
674 674 {'raw_id': raw_id,
675 675 'message': 'EMPTY or MISSING COMMIT'})
676 676 c.commit_changes.append([c_type, commit])
677 677
678 678 # current user review statuses for each version
679 679 c.review_versions = {}
680 680 if self._rhodecode_user.user_id in allowed_reviewers:
681 681 for co in general_comments:
682 682 if co.author.user_id == self._rhodecode_user.user_id:
683 683 status = co.status_change
684 684 if status:
685 685 _ver_pr = status[0].comment.pull_request_version_id
686 686 c.review_versions[_ver_pr] = status[0]
687 687
688 688 return self._get_template_context(c)
689 689
690 690 def get_commits(
691 691 self, commits_source_repo, pull_request_at_ver, source_commit,
692 692 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
693 693 commit_cache = collections.OrderedDict()
694 694 missing_requirements = False
695 695 try:
696 696 pre_load = ["author", "date", "message", "branch", "parents"]
697 697 show_revs = pull_request_at_ver.revisions
698 698 for rev in show_revs:
699 699 comm = commits_source_repo.get_commit(
700 700 commit_id=rev, pre_load=pre_load)
701 701 commit_cache[comm.raw_id] = comm
702 702
703 703 # Order here matters, we first need to get target, and then
704 704 # the source
705 705 target_commit = commits_source_repo.get_commit(
706 706 commit_id=safe_str(target_ref_id))
707 707
708 708 source_commit = commits_source_repo.get_commit(
709 709 commit_id=safe_str(source_ref_id))
710 710 except CommitDoesNotExistError:
711 711 log.warning(
712 712 'Failed to get commit from `{}` repo'.format(
713 713 commits_source_repo), exc_info=True)
714 714 except RepositoryRequirementError:
715 715 log.warning(
716 716 'Failed to get all required data from repo', exc_info=True)
717 717 missing_requirements = True
718 718 ancestor_commit = None
719 719 try:
720 720 ancestor_id = source_scm.get_common_ancestor(
721 721 source_commit.raw_id, target_commit.raw_id, target_scm)
722 722 ancestor_commit = source_scm.get_commit(ancestor_id)
723 723 except Exception:
724 724 ancestor_commit = None
725 725 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
726 726
727 727 def assure_not_empty_repo(self):
728 728 _ = self.request.translate
729 729
730 730 try:
731 731 self.db_repo.scm_instance().get_commit()
732 732 except EmptyRepositoryError:
733 733 h.flash(h.literal(_('There are no commits yet')),
734 734 category='warning')
735 735 raise HTTPFound(
736 736 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
737 737
738 738 @LoginRequired()
739 739 @NotAnonymous()
740 740 @HasRepoPermissionAnyDecorator(
741 741 'repository.read', 'repository.write', 'repository.admin')
742 742 @view_config(
743 743 route_name='pullrequest_new', request_method='GET',
744 744 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
745 745 def pull_request_new(self):
746 746 _ = self.request.translate
747 747 c = self.load_default_context()
748 748
749 749 self.assure_not_empty_repo()
750 750 source_repo = self.db_repo
751 751
752 752 commit_id = self.request.GET.get('commit')
753 753 branch_ref = self.request.GET.get('branch')
754 754 bookmark_ref = self.request.GET.get('bookmark')
755 755
756 756 try:
757 757 source_repo_data = PullRequestModel().generate_repo_data(
758 758 source_repo, commit_id=commit_id,
759 759 branch=branch_ref, bookmark=bookmark_ref,
760 760 translator=self.request.translate)
761 761 except CommitDoesNotExistError as e:
762 762 log.exception(e)
763 763 h.flash(_('Commit does not exist'), 'error')
764 764 raise HTTPFound(
765 765 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
766 766
767 767 default_target_repo = source_repo
768 768
769 769 if source_repo.parent and c.has_origin_repo_read_perm:
770 770 parent_vcs_obj = source_repo.parent.scm_instance()
771 771 if parent_vcs_obj and not parent_vcs_obj.is_empty():
772 772 # change default if we have a parent repo
773 773 default_target_repo = source_repo.parent
774 774
775 775 target_repo_data = PullRequestModel().generate_repo_data(
776 776 default_target_repo, translator=self.request.translate)
777 777
778 778 selected_source_ref = source_repo_data['refs']['selected_ref']
779 779 title_source_ref = ''
780 780 if selected_source_ref:
781 781 title_source_ref = selected_source_ref.split(':', 2)[1]
782 782 c.default_title = PullRequestModel().generate_pullrequest_title(
783 783 source=source_repo.repo_name,
784 784 source_ref=title_source_ref,
785 785 target=default_target_repo.repo_name
786 786 )
787 787
788 788 c.default_repo_data = {
789 789 'source_repo_name': source_repo.repo_name,
790 790 'source_refs_json': json.dumps(source_repo_data),
791 791 'target_repo_name': default_target_repo.repo_name,
792 792 'target_refs_json': json.dumps(target_repo_data),
793 793 }
794 794 c.default_source_ref = selected_source_ref
795 795
796 796 return self._get_template_context(c)
797 797
798 798 @LoginRequired()
799 799 @NotAnonymous()
800 800 @HasRepoPermissionAnyDecorator(
801 801 'repository.read', 'repository.write', 'repository.admin')
802 802 @view_config(
803 803 route_name='pullrequest_repo_refs', request_method='GET',
804 804 renderer='json_ext', xhr=True)
805 805 def pull_request_repo_refs(self):
806 806 self.load_default_context()
807 807 target_repo_name = self.request.matchdict['target_repo_name']
808 808 repo = Repository.get_by_repo_name(target_repo_name)
809 809 if not repo:
810 810 raise HTTPNotFound()
811 811
812 812 target_perm = HasRepoPermissionAny(
813 813 'repository.read', 'repository.write', 'repository.admin')(
814 814 target_repo_name)
815 815 if not target_perm:
816 816 raise HTTPNotFound()
817 817
818 818 return PullRequestModel().generate_repo_data(
819 819 repo, translator=self.request.translate)
820 820
821 821 @LoginRequired()
822 822 @NotAnonymous()
823 823 @HasRepoPermissionAnyDecorator(
824 824 'repository.read', 'repository.write', 'repository.admin')
825 825 @view_config(
826 826 route_name='pullrequest_repo_targets', request_method='GET',
827 827 renderer='json_ext', xhr=True)
828 828 def pullrequest_repo_targets(self):
829 829 _ = self.request.translate
830 830 filter_query = self.request.GET.get('query')
831 831
832 832 # get the parents
833 833 parent_target_repos = []
834 834 if self.db_repo.parent:
835 835 parents_query = Repository.query() \
836 836 .order_by(func.length(Repository.repo_name)) \
837 837 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
838 838
839 839 if filter_query:
840 840 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
841 841 parents_query = parents_query.filter(
842 842 Repository.repo_name.ilike(ilike_expression))
843 843 parents = parents_query.limit(20).all()
844 844
845 845 for parent in parents:
846 846 parent_vcs_obj = parent.scm_instance()
847 847 if parent_vcs_obj and not parent_vcs_obj.is_empty():
848 848 parent_target_repos.append(parent)
849 849
850 850 # get other forks, and repo itself
851 851 query = Repository.query() \
852 852 .order_by(func.length(Repository.repo_name)) \
853 853 .filter(
854 854 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
855 855 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
856 856 ) \
857 857 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
858 858
859 859 if filter_query:
860 860 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
861 861 query = query.filter(Repository.repo_name.ilike(ilike_expression))
862 862
863 863 limit = max(20 - len(parent_target_repos), 5) # not less than 5
864 864 target_repos = query.limit(limit).all()
865 865
866 866 all_target_repos = target_repos + parent_target_repos
867 867
868 868 repos = []
869 869 # This checks permissions to the repositories
870 870 for obj in ScmModel().get_repos(all_target_repos):
871 871 repos.append({
872 872 'id': obj['name'],
873 873 'text': obj['name'],
874 874 'type': 'repo',
875 875 'repo_id': obj['dbrepo']['repo_id'],
876 876 'repo_type': obj['dbrepo']['repo_type'],
877 877 'private': obj['dbrepo']['private'],
878 878
879 879 })
880 880
881 881 data = {
882 882 'more': False,
883 883 'results': [{
884 884 'text': _('Repositories'),
885 885 'children': repos
886 886 }] if repos else []
887 887 }
888 888 return data
889 889
890 890 @LoginRequired()
891 891 @NotAnonymous()
892 892 @HasRepoPermissionAnyDecorator(
893 893 'repository.read', 'repository.write', 'repository.admin')
894 894 @CSRFRequired()
895 895 @view_config(
896 896 route_name='pullrequest_create', request_method='POST',
897 897 renderer=None)
898 898 def pull_request_create(self):
899 899 _ = self.request.translate
900 900 self.assure_not_empty_repo()
901 901 self.load_default_context()
902 902
903 903 controls = peppercorn.parse(self.request.POST.items())
904 904
905 905 try:
906 906 form = PullRequestForm(
907 907 self.request.translate, self.db_repo.repo_id)()
908 908 _form = form.to_python(controls)
909 909 except formencode.Invalid as errors:
910 910 if errors.error_dict.get('revisions'):
911 911 msg = 'Revisions: %s' % errors.error_dict['revisions']
912 912 elif errors.error_dict.get('pullrequest_title'):
913 913 msg = errors.error_dict.get('pullrequest_title')
914 914 else:
915 915 msg = _('Error creating pull request: {}').format(errors)
916 916 log.exception(msg)
917 917 h.flash(msg, 'error')
918 918
919 919 # would rather just go back to form ...
920 920 raise HTTPFound(
921 921 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
922 922
923 923 source_repo = _form['source_repo']
924 924 source_ref = _form['source_ref']
925 925 target_repo = _form['target_repo']
926 926 target_ref = _form['target_ref']
927 927 commit_ids = _form['revisions'][::-1]
928 928
929 929 # find the ancestor for this pr
930 930 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
931 931 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
932 932
933 933 if not (source_db_repo or target_db_repo):
934 934 h.flash(_('source_repo or target repo not found'), category='error')
935 935 raise HTTPFound(
936 936 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
937 937
938 938 # re-check permissions again here
939 939 # source_repo we must have read permissions
940 940
941 941 source_perm = HasRepoPermissionAny(
942 942 'repository.read', 'repository.write', 'repository.admin')(
943 943 source_db_repo.repo_name)
944 944 if not source_perm:
945 945 msg = _('Not Enough permissions to source repo `{}`.'.format(
946 946 source_db_repo.repo_name))
947 947 h.flash(msg, category='error')
948 948 # copy the args back to redirect
949 949 org_query = self.request.GET.mixed()
950 950 raise HTTPFound(
951 951 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
952 952 _query=org_query))
953 953
954 954 # target repo we must have read permissions, and also later on
955 955 # we want to check branch permissions here
956 956 target_perm = HasRepoPermissionAny(
957 957 'repository.read', 'repository.write', 'repository.admin')(
958 958 target_db_repo.repo_name)
959 959 if not target_perm:
960 960 msg = _('Not Enough permissions to target repo `{}`.'.format(
961 961 target_db_repo.repo_name))
962 962 h.flash(msg, category='error')
963 963 # copy the args back to redirect
964 964 org_query = self.request.GET.mixed()
965 965 raise HTTPFound(
966 966 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
967 967 _query=org_query))
968 968
969 969 source_scm = source_db_repo.scm_instance()
970 970 target_scm = target_db_repo.scm_instance()
971 971
972 972 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
973 973 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
974 974
975 975 ancestor = source_scm.get_common_ancestor(
976 976 source_commit.raw_id, target_commit.raw_id, target_scm)
977 977
978 978 # recalculate target ref based on ancestor
979 979 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
980 980 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
981 981
982 982 get_default_reviewers_data, validate_default_reviewers = \
983 983 PullRequestModel().get_reviewer_functions()
984 984
985 985 # recalculate reviewers logic, to make sure we can validate this
986 986 reviewer_rules = get_default_reviewers_data(
987 987 self._rhodecode_db_user, source_db_repo,
988 988 source_commit, target_db_repo, target_commit)
989 989
990 990 given_reviewers = _form['review_members']
991 991 reviewers = validate_default_reviewers(
992 992 given_reviewers, reviewer_rules)
993 993
994 994 pullrequest_title = _form['pullrequest_title']
995 995 title_source_ref = source_ref.split(':', 2)[1]
996 996 if not pullrequest_title:
997 997 pullrequest_title = PullRequestModel().generate_pullrequest_title(
998 998 source=source_repo,
999 999 source_ref=title_source_ref,
1000 1000 target=target_repo
1001 1001 )
1002 1002
1003 1003 description = _form['pullrequest_desc']
1004 1004 description_renderer = _form['description_renderer']
1005 1005
1006 1006 try:
1007 1007 pull_request = PullRequestModel().create(
1008 1008 created_by=self._rhodecode_user.user_id,
1009 1009 source_repo=source_repo,
1010 1010 source_ref=source_ref,
1011 1011 target_repo=target_repo,
1012 1012 target_ref=target_ref,
1013 1013 revisions=commit_ids,
1014 1014 reviewers=reviewers,
1015 1015 title=pullrequest_title,
1016 1016 description=description,
1017 1017 description_renderer=description_renderer,
1018 1018 reviewer_data=reviewer_rules,
1019 1019 auth_user=self._rhodecode_user
1020 1020 )
1021 1021 Session().commit()
1022 1022
1023 1023 h.flash(_('Successfully opened new pull request'),
1024 1024 category='success')
1025 1025 except Exception:
1026 1026 msg = _('Error occurred during creation of this pull request.')
1027 1027 log.exception(msg)
1028 1028 h.flash(msg, category='error')
1029 1029
1030 1030 # copy the args back to redirect
1031 1031 org_query = self.request.GET.mixed()
1032 1032 raise HTTPFound(
1033 1033 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1034 1034 _query=org_query))
1035 1035
1036 1036 raise HTTPFound(
1037 1037 h.route_path('pullrequest_show', repo_name=target_repo,
1038 1038 pull_request_id=pull_request.pull_request_id))
1039 1039
1040 1040 @LoginRequired()
1041 1041 @NotAnonymous()
1042 1042 @HasRepoPermissionAnyDecorator(
1043 1043 'repository.read', 'repository.write', 'repository.admin')
1044 1044 @CSRFRequired()
1045 1045 @view_config(
1046 1046 route_name='pullrequest_update', request_method='POST',
1047 1047 renderer='json_ext')
1048 1048 def pull_request_update(self):
1049 1049 pull_request = PullRequest.get_or_404(
1050 1050 self.request.matchdict['pull_request_id'])
1051 1051 _ = self.request.translate
1052 1052
1053 1053 self.load_default_context()
1054 1054
1055 1055 if pull_request.is_closed():
1056 1056 log.debug('update: forbidden because pull request is closed')
1057 1057 msg = _(u'Cannot update closed pull requests.')
1058 1058 h.flash(msg, category='error')
1059 1059 return True
1060 1060
1061 1061 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1062 1062 log.debug('update: forbidden because pull request is in state %s',
1063 1063 pull_request.pull_request_state)
1064 1064 msg = _(u'Cannot update pull requests in state other than `{}`. '
1065 1065 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1066 1066 pull_request.pull_request_state)
1067 1067 h.flash(msg, category='error')
1068 1068 return True
1069 1069
1070 1070 # only owner or admin can update it
1071 1071 allowed_to_update = PullRequestModel().check_user_update(
1072 1072 pull_request, self._rhodecode_user)
1073 1073 if allowed_to_update:
1074 1074 controls = peppercorn.parse(self.request.POST.items())
1075 1075
1076 1076 if 'review_members' in controls:
1077 1077 self._update_reviewers(
1078 1078 pull_request, controls['review_members'],
1079 1079 pull_request.reviewer_data)
1080 1080 elif str2bool(self.request.POST.get('update_commits', 'false')):
1081 1081 self._update_commits(pull_request)
1082 1082 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1083 1083 self._edit_pull_request(pull_request)
1084 1084 else:
1085 1085 raise HTTPBadRequest()
1086 1086 return True
1087 1087 raise HTTPForbidden()
1088 1088
1089 1089 def _edit_pull_request(self, pull_request):
1090 1090 _ = self.request.translate
1091 1091
1092 1092 try:
1093 1093 PullRequestModel().edit(
1094 1094 pull_request,
1095 1095 self.request.POST.get('title'),
1096 1096 self.request.POST.get('description'),
1097 1097 self.request.POST.get('description_renderer'),
1098 1098 self._rhodecode_user)
1099 1099 except ValueError:
1100 1100 msg = _(u'Cannot update closed pull requests.')
1101 1101 h.flash(msg, category='error')
1102 1102 return
1103 1103 else:
1104 1104 Session().commit()
1105 1105
1106 1106 msg = _(u'Pull request title & description updated.')
1107 1107 h.flash(msg, category='success')
1108 1108 return
1109 1109
1110 1110 def _update_commits(self, pull_request):
1111 1111 _ = self.request.translate
1112 1112
1113 1113 with pull_request.set_state(PullRequest.STATE_UPDATING):
1114 1114 resp = PullRequestModel().update_commits(pull_request)
1115 1115
1116 1116 if resp.executed:
1117 1117
1118 1118 if resp.target_changed and resp.source_changed:
1119 1119 changed = 'target and source repositories'
1120 1120 elif resp.target_changed and not resp.source_changed:
1121 1121 changed = 'target repository'
1122 1122 elif not resp.target_changed and resp.source_changed:
1123 1123 changed = 'source repository'
1124 1124 else:
1125 1125 changed = 'nothing'
1126 1126
1127 1127 msg = _(u'Pull request updated to "{source_commit_id}" with '
1128 1128 u'{count_added} added, {count_removed} removed commits. '
1129 1129 u'Source of changes: {change_source}')
1130 1130 msg = msg.format(
1131 1131 source_commit_id=pull_request.source_ref_parts.commit_id,
1132 1132 count_added=len(resp.changes.added),
1133 1133 count_removed=len(resp.changes.removed),
1134 1134 change_source=changed)
1135 1135 h.flash(msg, category='success')
1136 1136
1137 1137 channel = '/repo${}$/pr/{}'.format(
1138 1138 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1139 1139 message = msg + (
1140 1140 ' - <a onclick="window.location.reload()">'
1141 1141 '<strong>{}</strong></a>'.format(_('Reload page')))
1142 1142 channelstream.post_message(
1143 1143 channel, message, self._rhodecode_user.username,
1144 1144 registry=self.request.registry)
1145 1145 else:
1146 1146 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1147 1147 warning_reasons = [
1148 1148 UpdateFailureReason.NO_CHANGE,
1149 1149 UpdateFailureReason.WRONG_REF_TYPE,
1150 1150 ]
1151 1151 category = 'warning' if resp.reason in warning_reasons else 'error'
1152 1152 h.flash(msg, category=category)
1153 1153
1154 1154 @LoginRequired()
1155 1155 @NotAnonymous()
1156 1156 @HasRepoPermissionAnyDecorator(
1157 1157 'repository.read', 'repository.write', 'repository.admin')
1158 1158 @CSRFRequired()
1159 1159 @view_config(
1160 1160 route_name='pullrequest_merge', request_method='POST',
1161 1161 renderer='json_ext')
1162 1162 def pull_request_merge(self):
1163 1163 """
1164 1164 Merge will perform a server-side merge of the specified
1165 1165 pull request, if the pull request is approved and mergeable.
1166 1166 After successful merging, the pull request is automatically
1167 1167 closed, with a relevant comment.
1168 1168 """
1169 1169 pull_request = PullRequest.get_or_404(
1170 1170 self.request.matchdict['pull_request_id'])
1171 1171 _ = self.request.translate
1172 1172
1173 1173 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1174 1174 log.debug('show: forbidden because pull request is in state %s',
1175 1175 pull_request.pull_request_state)
1176 1176 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1177 1177 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1178 1178 pull_request.pull_request_state)
1179 1179 h.flash(msg, category='error')
1180 1180 raise HTTPFound(
1181 1181 h.route_path('pullrequest_show',
1182 1182 repo_name=pull_request.target_repo.repo_name,
1183 1183 pull_request_id=pull_request.pull_request_id))
1184 1184
1185 1185 self.load_default_context()
1186 1186
1187 1187 with pull_request.set_state(PullRequest.STATE_UPDATING):
1188 1188 check = MergeCheck.validate(
1189 1189 pull_request, auth_user=self._rhodecode_user,
1190 1190 translator=self.request.translate)
1191 1191 merge_possible = not check.failed
1192 1192
1193 1193 for err_type, error_msg in check.errors:
1194 1194 h.flash(error_msg, category=err_type)
1195 1195
1196 1196 if merge_possible:
1197 1197 log.debug("Pre-conditions checked, trying to merge.")
1198 1198 extras = vcs_operation_context(
1199 1199 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1200 1200 username=self._rhodecode_db_user.username, action='push',
1201 1201 scm=pull_request.target_repo.repo_type)
1202 1202 with pull_request.set_state(PullRequest.STATE_UPDATING):
1203 1203 self._merge_pull_request(
1204 1204 pull_request, self._rhodecode_db_user, extras)
1205 1205 else:
1206 1206 log.debug("Pre-conditions failed, NOT merging.")
1207 1207
1208 1208 raise HTTPFound(
1209 1209 h.route_path('pullrequest_show',
1210 1210 repo_name=pull_request.target_repo.repo_name,
1211 1211 pull_request_id=pull_request.pull_request_id))
1212 1212
1213 1213 def _merge_pull_request(self, pull_request, user, extras):
1214 1214 _ = self.request.translate
1215 1215 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1216 1216
1217 1217 if merge_resp.executed:
1218 1218 log.debug("The merge was successful, closing the pull request.")
1219 1219 PullRequestModel().close_pull_request(
1220 1220 pull_request.pull_request_id, user)
1221 1221 Session().commit()
1222 1222 msg = _('Pull request was successfully merged and closed.')
1223 1223 h.flash(msg, category='success')
1224 1224 else:
1225 1225 log.debug(
1226 1226 "The merge was not successful. Merge response: %s", merge_resp)
1227 1227 msg = merge_resp.merge_status_message
1228 1228 h.flash(msg, category='error')
1229 1229
1230 1230 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1231 1231 _ = self.request.translate
1232 1232
1233 1233 get_default_reviewers_data, validate_default_reviewers = \
1234 1234 PullRequestModel().get_reviewer_functions()
1235 1235
1236 1236 try:
1237 1237 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1238 1238 except ValueError as e:
1239 1239 log.error('Reviewers Validation: {}'.format(e))
1240 1240 h.flash(e, category='error')
1241 1241 return
1242 1242
1243 1243 old_calculated_status = pull_request.calculated_review_status()
1244 1244 PullRequestModel().update_reviewers(
1245 1245 pull_request, reviewers, self._rhodecode_user)
1246 1246 h.flash(_('Pull request reviewers updated.'), category='success')
1247 1247 Session().commit()
1248 1248
1249 1249 # trigger status changed if change in reviewers changes the status
1250 1250 calculated_status = pull_request.calculated_review_status()
1251 1251 if old_calculated_status != calculated_status:
1252 1252 PullRequestModel().trigger_pull_request_hook(
1253 1253 pull_request, self._rhodecode_user, 'review_status_change',
1254 1254 data={'status': calculated_status})
1255 1255
1256 1256 @LoginRequired()
1257 1257 @NotAnonymous()
1258 1258 @HasRepoPermissionAnyDecorator(
1259 1259 'repository.read', 'repository.write', 'repository.admin')
1260 1260 @CSRFRequired()
1261 1261 @view_config(
1262 1262 route_name='pullrequest_delete', request_method='POST',
1263 1263 renderer='json_ext')
1264 1264 def pull_request_delete(self):
1265 1265 _ = self.request.translate
1266 1266
1267 1267 pull_request = PullRequest.get_or_404(
1268 1268 self.request.matchdict['pull_request_id'])
1269 1269 self.load_default_context()
1270 1270
1271 1271 pr_closed = pull_request.is_closed()
1272 1272 allowed_to_delete = PullRequestModel().check_user_delete(
1273 1273 pull_request, self._rhodecode_user) and not pr_closed
1274 1274
1275 1275 # only owner can delete it !
1276 1276 if allowed_to_delete:
1277 1277 PullRequestModel().delete(pull_request, self._rhodecode_user)
1278 1278 Session().commit()
1279 1279 h.flash(_('Successfully deleted pull request'),
1280 1280 category='success')
1281 1281 raise HTTPFound(h.route_path('pullrequest_show_all',
1282 1282 repo_name=self.db_repo_name))
1283 1283
1284 1284 log.warning('user %s tried to delete pull request without access',
1285 1285 self._rhodecode_user)
1286 1286 raise HTTPNotFound()
1287 1287
1288 1288 @LoginRequired()
1289 1289 @NotAnonymous()
1290 1290 @HasRepoPermissionAnyDecorator(
1291 1291 'repository.read', 'repository.write', 'repository.admin')
1292 1292 @CSRFRequired()
1293 1293 @view_config(
1294 1294 route_name='pullrequest_comment_create', request_method='POST',
1295 1295 renderer='json_ext')
1296 1296 def pull_request_comment_create(self):
1297 1297 _ = self.request.translate
1298 1298
1299 1299 pull_request = PullRequest.get_or_404(
1300 1300 self.request.matchdict['pull_request_id'])
1301 1301 pull_request_id = pull_request.pull_request_id
1302 1302
1303 1303 if pull_request.is_closed():
1304 1304 log.debug('comment: forbidden because pull request is closed')
1305 1305 raise HTTPForbidden()
1306 1306
1307 1307 allowed_to_comment = PullRequestModel().check_user_comment(
1308 1308 pull_request, self._rhodecode_user)
1309 1309 if not allowed_to_comment:
1310 1310 log.debug(
1311 1311 'comment: forbidden because pull request is from forbidden repo')
1312 1312 raise HTTPForbidden()
1313 1313
1314 1314 c = self.load_default_context()
1315 1315
1316 1316 status = self.request.POST.get('changeset_status', None)
1317 1317 text = self.request.POST.get('text')
1318 1318 comment_type = self.request.POST.get('comment_type')
1319 1319 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1320 1320 close_pull_request = self.request.POST.get('close_pull_request')
1321 1321
1322 1322 # the logic here works as follows: if we submit a close-PR
1323 1323 # comment, use the `close_pull_request_with_comment` function,
1324 1324 # otherwise handle regular comment logic
1325 1325
1326 1326 if close_pull_request:
1327 1327 # only owner or admin or person with write permissions
1328 1328 allowed_to_close = PullRequestModel().check_user_update(
1329 1329 pull_request, self._rhodecode_user)
1330 1330 if not allowed_to_close:
1331 1331 log.debug('comment: forbidden because not allowed to close '
1332 1332 'pull request %s', pull_request_id)
1333 1333 raise HTTPForbidden()
1334 1334
1335 1335 # This also triggers `review_status_change`
1336 1336 comment, status = PullRequestModel().close_pull_request_with_comment(
1337 1337 pull_request, self._rhodecode_user, self.db_repo, message=text,
1338 1338 auth_user=self._rhodecode_user)
1339 1339 Session().flush()
1340 1340
1341 1341 PullRequestModel().trigger_pull_request_hook(
1342 1342 pull_request, self._rhodecode_user, 'comment',
1343 1343 data={'comment': comment})
1344 1344
1345 1345 else:
1346 1346 # regular comment case, could be inline, or one with status.
1347 1347 # for that one we check also permissions
1348 1348
1349 1349 allowed_to_change_status = PullRequestModel().check_user_change_status(
1350 1350 pull_request, self._rhodecode_user)
1351 1351
1352 1352 if status and allowed_to_change_status:
1353 1353 message = (_('Status change %(transition_icon)s %(status)s')
1354 1354 % {'transition_icon': '>',
1355 1355 'status': ChangesetStatus.get_status_lbl(status)})
1356 1356 text = text or message
1357 1357
1358 1358 comment = CommentsModel().create(
1359 1359 text=text,
1360 1360 repo=self.db_repo.repo_id,
1361 1361 user=self._rhodecode_user.user_id,
1362 1362 pull_request=pull_request,
1363 1363 f_path=self.request.POST.get('f_path'),
1364 1364 line_no=self.request.POST.get('line'),
1365 1365 status_change=(ChangesetStatus.get_status_lbl(status)
1366 1366 if status and allowed_to_change_status else None),
1367 1367 status_change_type=(status
1368 1368 if status and allowed_to_change_status else None),
1369 1369 comment_type=comment_type,
1370 1370 resolves_comment_id=resolves_comment_id,
1371 1371 auth_user=self._rhodecode_user
1372 1372 )
1373 1373
1374 1374 if allowed_to_change_status:
1375 1375 # calculate old status before we change it
1376 1376 old_calculated_status = pull_request.calculated_review_status()
1377 1377
1378 1378 # get status if set !
1379 1379 if status:
1380 1380 ChangesetStatusModel().set_status(
1381 1381 self.db_repo.repo_id,
1382 1382 status,
1383 1383 self._rhodecode_user.user_id,
1384 1384 comment,
1385 1385 pull_request=pull_request
1386 1386 )
1387 1387
1388 1388 Session().flush()
1389 1389 # the refresh is required to get access to relationships
1390 1390 # loaded on the comment
1391 1391 Session().refresh(comment)
1392 1392
1393 1393 PullRequestModel().trigger_pull_request_hook(
1394 1394 pull_request, self._rhodecode_user, 'comment',
1395 1395 data={'comment': comment})
1396 1396
1397 1397 # we now calculate the status of the pull request, and based on that
1398 1398 # calculation we set the commit statuses
1399 1399 calculated_status = pull_request.calculated_review_status()
1400 1400 if old_calculated_status != calculated_status:
1401 1401 PullRequestModel().trigger_pull_request_hook(
1402 1402 pull_request, self._rhodecode_user, 'review_status_change',
1403 1403 data={'status': calculated_status})
1404 1404
1405 1405 Session().commit()
1406 1406
1407 1407 data = {
1408 1408 'target_id': h.safeid(h.safe_unicode(
1409 1409 self.request.POST.get('f_path'))),
1410 1410 }
1411 1411 if comment:
1412 1412 c.co = comment
1413 1413 rendered_comment = render(
1414 1414 'rhodecode:templates/changeset/changeset_comment_block.mako',
1415 1415 self._get_template_context(c), self.request)
1416 1416
1417 1417 data.update(comment.get_dict())
1418 1418 data.update({'rendered_text': rendered_comment})
1419 1419
1420 1420 return data
1421 1421
1422 1422 @LoginRequired()
1423 1423 @NotAnonymous()
1424 1424 @HasRepoPermissionAnyDecorator(
1425 1425 'repository.read', 'repository.write', 'repository.admin')
1426 1426 @CSRFRequired()
1427 1427 @view_config(
1428 1428 route_name='pullrequest_comment_delete', request_method='POST',
1429 1429 renderer='json_ext')
1430 1430 def pull_request_comment_delete(self):
1431 1431 pull_request = PullRequest.get_or_404(
1432 1432 self.request.matchdict['pull_request_id'])
1433 1433
1434 1434 comment = ChangesetComment.get_or_404(
1435 1435 self.request.matchdict['comment_id'])
1436 1436 comment_id = comment.comment_id
1437 1437
1438 1438 if pull_request.is_closed():
1439 1439 log.debug('comment: forbidden because pull request is closed')
1440 1440 raise HTTPForbidden()
1441 1441
1442 1442 if not comment:
1443 1443 log.debug('Comment with id:%s not found, skipping', comment_id)
1444 1444 # comment already deleted in another call probably
1445 1445 return True
1446 1446
1447 1447 if comment.pull_request.is_closed():
1448 1448 # don't allow deleting comments on closed pull request
1449 1449 raise HTTPForbidden()
1450 1450
1451 1451 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1452 1452 super_admin = h.HasPermissionAny('hg.admin')()
1453 1453 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1454 1454 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1455 1455 comment_repo_admin = is_repo_admin and is_repo_comment
1456 1456
1457 1457 if super_admin or comment_owner or comment_repo_admin:
1458 1458 old_calculated_status = comment.pull_request.calculated_review_status()
1459 1459 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1460 1460 Session().commit()
1461 1461 calculated_status = comment.pull_request.calculated_review_status()
1462 1462 if old_calculated_status != calculated_status:
1463 1463 PullRequestModel().trigger_pull_request_hook(
1464 1464 comment.pull_request, self._rhodecode_user, 'review_status_change',
1465 1465 data={'status': calculated_status})
1466 1466 return True
1467 1467 else:
1468 1468 log.warning('No permissions for user %s to delete comment_id: %s',
1469 1469 self._rhodecode_db_user, comment_id)
1470 1470 raise HTTPNotFound()
@@ -1,5446 +1,5446 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 or_, and_, not_, func, TypeDecorator, event,
40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers.text import collapse, remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
81 81 # this is propagated from the .ini file rhodecode.encrypted_values.secret or
82 82 # beaker.session.secret if the first is not set,
83 83 # and is initialized at environment.py
84 84 ENCRYPTION_KEY = None
85 85
86 86 # used to sort permissions by types, '#' used here is not allowed to be in
87 87 # usernames, and it's very early in sorted string.printable table.
88 88 PERMISSION_TYPE_SORT = {
89 89 'admin': '####',
90 90 'write': '###',
91 91 'read': '##',
92 92 'none': '#',
93 93 }
94 94
95 95
96 96 def display_user_sort(obj):
97 97 """
98 98 Sort function used to sort permissions in .permissions() function of
99 99 Repository, RepoGroup, UserGroup. Also it puts the default user in front
100 100 of all other resources
101 101 """
102 102
103 103 if obj.username == User.DEFAULT_USER:
104 104 return '#####'
105 105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 106 return prefix + obj.username
107 107
108 108
109 109 def display_user_group_sort(obj):
110 110 """
111 111 Sort function used to sort permissions in .permissions() function of
112 112 Repository, RepoGroup, UserGroup. Also it puts the default user in front
113 113 of all other resources
114 114 """
115 115
116 116 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
117 117 return prefix + obj.users_group_name
118 118
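# NOTE: illustrative sketch, not part of the original changeset. It shows how
# the helpers above are meant to be used as sort keys: the default user sorts
# first ('#####'), then admins, writers, readers and 'none', alphabetically
# within each level. `user_rows` and `group_rows` are hypothetical lists of
# AttributeDict-like rows carrying `username`/`users_group_name` and
# `permission` attributes, as produced by the .permissions() methods below.
def _example_sort_permission_rows(user_rows, group_rows):
    sorted_users = sorted(user_rows, key=display_user_sort)
    sorted_groups = sorted(group_rows, key=display_user_group_sort)
    return sorted_users, sorted_groups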
119 119
120 120 def _hash_key(k):
121 121 return sha1_safe(k)
122 122
123 123
124 124 def in_filter_generator(qry, items, limit=500):
125 125 """
126 126 Splits IN() into multiple with OR
127 127 e.g.::
128 128 cnt = Repository.query().filter(
129 129 or_(
130 130 *in_filter_generator(Repository.repo_id, range(100000))
131 131 )).count()
132 132 """
133 133 if not items:
134 134 # empty list will cause empty query which might cause security issues
135 135 # this can lead to hidden unpleasant results
136 136 items = [-1]
137 137
138 138 parts = []
139 139 for chunk in xrange(0, len(items), limit):
140 140 parts.append(
141 141 qry.in_(items[chunk: chunk + limit])
142 142 )
143 143
144 144 return parts
145 145
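# NOTE: illustrative sketch, not part of the original changeset. With the
# default limit of 500, a list of 1200 ids yields three IN() clauses that the
# caller combines with or_(); an empty list degrades to IN(-1), so the query
# matches nothing instead of everything.
def _example_in_filter_chunks(repo_ids):
    return or_(*in_filter_generator(Repository.repo_id, repo_ids))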
146 146
147 147 base_table_args = {
148 148 'extend_existing': True,
149 149 'mysql_engine': 'InnoDB',
150 150 'mysql_charset': 'utf8',
151 151 'sqlite_autoincrement': True
152 152 }
153 153
154 154
155 155 class EncryptedTextValue(TypeDecorator):
156 156 """
157 157 Special column for encrypted long text data, use like::
158 158
159 159 value = Column("encrypted_value", EncryptedValue(), nullable=False)
160 160
161 161 This column is intelligent: if the value is in unencrypted form it returns
162 162 the unencrypted form, but on save it always encrypts
163 163 """
164 164 impl = Text
165 165
166 166 def process_bind_param(self, value, dialect):
167 167 """
168 168 Setter for storing value
169 169 """
170 170 import rhodecode
171 171 if not value:
172 172 return value
173 173
174 174 # protect against double encrypting if the value is already encrypted
175 175 if value.startswith('enc$aes$') \
176 176 or value.startswith('enc$aes_hmac$') \
177 177 or value.startswith('enc2$'):
178 178 raise ValueError('value needs to be in unencrypted format, '
179 179 'ie. not starting with enc$ or enc2$')
180 180
181 181 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
182 182 if algo == 'aes':
183 183 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
184 184 elif algo == 'fernet':
185 185 return Encryptor(ENCRYPTION_KEY).encrypt(value)
186 186 else:
187 187 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
188 188
189 189 def process_result_value(self, value, dialect):
190 190 """
191 191 Getter for retrieving value
192 192 """
193 193
194 194 import rhodecode
195 195 if not value:
196 196 return value
197 197
198 198 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
199 199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
200 200 if algo == 'aes':
201 201 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
202 202 elif algo == 'fernet':
203 203 return Encryptor(ENCRYPTION_KEY).decrypt(value)
204 204 else:
205 205 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
206 206 return decrypted_data
207 207
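# NOTE: illustrative sketch, not part of the original changeset. Assuming
# ENCRYPTION_KEY has been initialized (see the note above), a value pushed
# through process_bind_param() is stored with an 'enc$aes_hmac$' or 'enc2$'
# prefix (depending on rhodecode.encrypted_values.algorithm) and comes back
# as the original plaintext from process_result_value().
def _example_encrypted_value_roundtrip(plain_value):
    cipher = EncryptedTextValue()
    stored = cipher.process_bind_param(plain_value, None)
    return cipher.process_result_value(stored, None)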
208 208
209 209 class BaseModel(object):
210 210 """
211 211 Base Model for all classes
212 212 """
213 213
214 214 @classmethod
215 215 def _get_keys(cls):
216 216 """return column names for this model """
217 217 return class_mapper(cls).c.keys()
218 218
219 219 def get_dict(self):
220 220 """
221 221 return dict with keys and values corresponding
222 222 to this model data """
223 223
224 224 d = {}
225 225 for k in self._get_keys():
226 226 d[k] = getattr(self, k)
227 227
228 228 # also use __json__() if present to get additional fields
229 229 _json_attr = getattr(self, '__json__', None)
230 230 if _json_attr:
231 231 # update with attributes from __json__
232 232 if callable(_json_attr):
233 233 _json_attr = _json_attr()
234 234 for k, val in _json_attr.iteritems():
235 235 d[k] = val
236 236 return d
237 237
238 238 def get_appstruct(self):
239 239 """return list with keys and values tuples corresponding
240 240 to this model data """
241 241
242 242 lst = []
243 243 for k in self._get_keys():
244 244 lst.append((k, getattr(self, k),))
245 245 return lst
246 246
247 247 def populate_obj(self, populate_dict):
248 248 """populate model with data from given populate_dict"""
249 249
250 250 for k in self._get_keys():
251 251 if k in populate_dict:
252 252 setattr(self, k, populate_dict[k])
253 253
254 254 @classmethod
255 255 def query(cls):
256 256 return Session().query(cls)
257 257
258 258 @classmethod
259 259 def get(cls, id_):
260 260 if id_:
261 261 return cls.query().get(id_)
262 262
263 263 @classmethod
264 264 def get_or_404(cls, id_):
265 265 from pyramid.httpexceptions import HTTPNotFound
266 266
267 267 try:
268 268 id_ = int(id_)
269 269 except (TypeError, ValueError):
270 270 raise HTTPNotFound()
271 271
272 272 res = cls.query().get(id_)
273 273 if not res:
274 274 raise HTTPNotFound()
275 275 return res
276 276
277 277 @classmethod
278 278 def getAll(cls):
279 279 # deprecated and left for backward compatibility
280 280 return cls.get_all()
281 281
282 282 @classmethod
283 283 def get_all(cls):
284 284 return cls.query().all()
285 285
286 286 @classmethod
287 287 def delete(cls, id_):
288 288 obj = cls.query().get(id_)
289 289 Session().delete(obj)
290 290
291 291 @classmethod
292 292 def identity_cache(cls, session, attr_name, value):
293 293 exist_in_session = []
294 294 for (item_cls, pkey), instance in session.identity_map.items():
295 295 if cls == item_cls and getattr(instance, attr_name) == value:
296 296 exist_in_session.append(instance)
297 297 if exist_in_session:
298 298 if len(exist_in_session) == 1:
299 299 return exist_in_session[0]
300 300 log.exception(
301 301 'multiple objects with attr %s and '
302 302 'value %s found with same name: %r',
303 303 attr_name, value, exist_in_session)
304 304
305 305 def __repr__(self):
306 306 if hasattr(self, '__unicode__'):
307 307 # python repr needs to return str
308 308 try:
309 309 return safe_str(self.__unicode__())
310 310 except UnicodeDecodeError:
311 311 pass
312 312 return '<DB:%s>' % (self.__class__.__name__)
313 313
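# NOTE: illustrative sketch, not part of the original changeset. get_or_404()
# turns a missing or malformed primary key into HTTPNotFound; this is how the
# pull-request views above resolve ids taken from the URL matchdict, e.g.
# PullRequest.get_or_404(pull_request_id).
def _example_load_pull_request_or_404(pull_request_id):
    return PullRequest.get_or_404(pull_request_id)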
314 314
315 315 class RhodeCodeSetting(Base, BaseModel):
316 316 __tablename__ = 'rhodecode_settings'
317 317 __table_args__ = (
318 318 UniqueConstraint('app_settings_name'),
319 319 base_table_args
320 320 )
321 321
322 322 SETTINGS_TYPES = {
323 323 'str': safe_str,
324 324 'int': safe_int,
325 325 'unicode': safe_unicode,
326 326 'bool': str2bool,
327 327 'list': functools.partial(aslist, sep=',')
328 328 }
329 329 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
330 330 GLOBAL_CONF_KEY = 'app_settings'
331 331
332 332 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
333 333 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
334 334 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
335 335 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
336 336
337 337 def __init__(self, key='', val='', type='unicode'):
338 338 self.app_settings_name = key
339 339 self.app_settings_type = type
340 340 self.app_settings_value = val
341 341
342 342 @validates('_app_settings_value')
343 343 def validate_settings_value(self, key, val):
344 344 assert type(val) == unicode
345 345 return val
346 346
347 347 @hybrid_property
348 348 def app_settings_value(self):
349 349 v = self._app_settings_value
350 350 _type = self.app_settings_type
351 351 if _type:
352 352 _type = self.app_settings_type.split('.')[0]
353 353 # decode the encrypted value
354 354 if 'encrypted' in self.app_settings_type:
355 355 cipher = EncryptedTextValue()
356 356 v = safe_unicode(cipher.process_result_value(v, None))
357 357
358 358 converter = self.SETTINGS_TYPES.get(_type) or \
359 359 self.SETTINGS_TYPES['unicode']
360 360 return converter(v)
361 361
362 362 @app_settings_value.setter
363 363 def app_settings_value(self, val):
364 364 """
365 365 Setter that will always make sure we use unicode in app_settings_value
366 366
367 367 :param val:
368 368 """
369 369 val = safe_unicode(val)
370 370 # encode the encrypted value
371 371 if 'encrypted' in self.app_settings_type:
372 372 cipher = EncryptedTextValue()
373 373 val = safe_unicode(cipher.process_bind_param(val, None))
374 374 self._app_settings_value = val
375 375
376 376 @hybrid_property
377 377 def app_settings_type(self):
378 378 return self._app_settings_type
379 379
380 380 @app_settings_type.setter
381 381 def app_settings_type(self, val):
382 382 if val.split('.')[0] not in self.SETTINGS_TYPES:
383 383 raise Exception('type must be one of %s got %s'
384 384 % (self.SETTINGS_TYPES.keys(), val))
385 385 self._app_settings_type = val
386 386
387 387 @classmethod
388 388 def get_by_prefix(cls, prefix):
389 389 return RhodeCodeSetting.query()\
390 390 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
391 391 .all()
392 392
393 393 def __unicode__(self):
394 394 return u"<%s('%s:%s[%s]')>" % (
395 395 self.__class__.__name__,
396 396 self.app_settings_name, self.app_settings_value,
397 397 self.app_settings_type
398 398 )
399 399
400 400
401 401 class RhodeCodeUi(Base, BaseModel):
402 402 __tablename__ = 'rhodecode_ui'
403 403 __table_args__ = (
404 404 UniqueConstraint('ui_key'),
405 405 base_table_args
406 406 )
407 407
408 408 HOOK_REPO_SIZE = 'changegroup.repo_size'
409 409 # HG
410 410 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
411 411 HOOK_PULL = 'outgoing.pull_logger'
412 412 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
413 413 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
414 414 HOOK_PUSH = 'changegroup.push_logger'
415 415 HOOK_PUSH_KEY = 'pushkey.key_push'
416 416
417 417 HOOKS_BUILTIN = [
418 418 HOOK_PRE_PULL,
419 419 HOOK_PULL,
420 420 HOOK_PRE_PUSH,
421 421 HOOK_PRETX_PUSH,
422 422 HOOK_PUSH,
423 423 HOOK_PUSH_KEY,
424 424 ]
425 425
426 426 # TODO: johbo: Unify way how hooks are configured for git and hg,
427 427 # git part is currently hardcoded.
428 428
429 429 # SVN PATTERNS
430 430 SVN_BRANCH_ID = 'vcs_svn_branch'
431 431 SVN_TAG_ID = 'vcs_svn_tag'
432 432
433 433 ui_id = Column(
434 434 "ui_id", Integer(), nullable=False, unique=True, default=None,
435 435 primary_key=True)
436 436 ui_section = Column(
437 437 "ui_section", String(255), nullable=True, unique=None, default=None)
438 438 ui_key = Column(
439 439 "ui_key", String(255), nullable=True, unique=None, default=None)
440 440 ui_value = Column(
441 441 "ui_value", String(255), nullable=True, unique=None, default=None)
442 442 ui_active = Column(
443 443 "ui_active", Boolean(), nullable=True, unique=None, default=True)
444 444
445 445 def __repr__(self):
446 446 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
447 447 self.ui_key, self.ui_value)
448 448
449 449
450 450 class RepoRhodeCodeSetting(Base, BaseModel):
451 451 __tablename__ = 'repo_rhodecode_settings'
452 452 __table_args__ = (
453 453 UniqueConstraint(
454 454 'app_settings_name', 'repository_id',
455 455 name='uq_repo_rhodecode_setting_name_repo_id'),
456 456 base_table_args
457 457 )
458 458
459 459 repository_id = Column(
460 460 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
461 461 nullable=False)
462 462 app_settings_id = Column(
463 463 "app_settings_id", Integer(), nullable=False, unique=True,
464 464 default=None, primary_key=True)
465 465 app_settings_name = Column(
466 466 "app_settings_name", String(255), nullable=True, unique=None,
467 467 default=None)
468 468 _app_settings_value = Column(
469 469 "app_settings_value", String(4096), nullable=True, unique=None,
470 470 default=None)
471 471 _app_settings_type = Column(
472 472 "app_settings_type", String(255), nullable=True, unique=None,
473 473 default=None)
474 474
475 475 repository = relationship('Repository')
476 476
477 477 def __init__(self, repository_id, key='', val='', type='unicode'):
478 478 self.repository_id = repository_id
479 479 self.app_settings_name = key
480 480 self.app_settings_type = type
481 481 self.app_settings_value = val
482 482
483 483 @validates('_app_settings_value')
484 484 def validate_settings_value(self, key, val):
485 485 assert type(val) == unicode
486 486 return val
487 487
488 488 @hybrid_property
489 489 def app_settings_value(self):
490 490 v = self._app_settings_value
491 491 type_ = self.app_settings_type
492 492 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
493 493 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
494 494 return converter(v)
495 495
496 496 @app_settings_value.setter
497 497 def app_settings_value(self, val):
498 498 """
499 499 Setter that will always make sure we use unicode in app_settings_value
500 500
501 501 :param val:
502 502 """
503 503 self._app_settings_value = safe_unicode(val)
504 504
505 505 @hybrid_property
506 506 def app_settings_type(self):
507 507 return self._app_settings_type
508 508
509 509 @app_settings_type.setter
510 510 def app_settings_type(self, val):
511 511 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 512 if val not in SETTINGS_TYPES:
513 513 raise Exception('type must be one of %s got %s'
514 514 % (SETTINGS_TYPES.keys(), val))
515 515 self._app_settings_type = val
516 516
517 517 def __unicode__(self):
518 518 return u"<%s('%s:%s:%s[%s]')>" % (
519 519 self.__class__.__name__, self.repository.repo_name,
520 520 self.app_settings_name, self.app_settings_value,
521 521 self.app_settings_type
522 522 )
523 523
524 524
525 525 class RepoRhodeCodeUi(Base, BaseModel):
526 526 __tablename__ = 'repo_rhodecode_ui'
527 527 __table_args__ = (
528 528 UniqueConstraint(
529 529 'repository_id', 'ui_section', 'ui_key',
530 530 name='uq_repo_rhodecode_ui_repository_id_section_key'),
531 531 base_table_args
532 532 )
533 533
534 534 repository_id = Column(
535 535 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
536 536 nullable=False)
537 537 ui_id = Column(
538 538 "ui_id", Integer(), nullable=False, unique=True, default=None,
539 539 primary_key=True)
540 540 ui_section = Column(
541 541 "ui_section", String(255), nullable=True, unique=None, default=None)
542 542 ui_key = Column(
543 543 "ui_key", String(255), nullable=True, unique=None, default=None)
544 544 ui_value = Column(
545 545 "ui_value", String(255), nullable=True, unique=None, default=None)
546 546 ui_active = Column(
547 547 "ui_active", Boolean(), nullable=True, unique=None, default=True)
548 548
549 549 repository = relationship('Repository')
550 550
551 551 def __repr__(self):
552 552 return '<%s[%s:%s]%s=>%s]>' % (
553 553 self.__class__.__name__, self.repository.repo_name,
554 554 self.ui_section, self.ui_key, self.ui_value)
555 555
556 556
557 557 class User(Base, BaseModel):
558 558 __tablename__ = 'users'
559 559 __table_args__ = (
560 560 UniqueConstraint('username'), UniqueConstraint('email'),
561 561 Index('u_username_idx', 'username'),
562 562 Index('u_email_idx', 'email'),
563 563 base_table_args
564 564 )
565 565
566 566 DEFAULT_USER = 'default'
567 567 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
568 568 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
569 569
570 570 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
571 571 username = Column("username", String(255), nullable=True, unique=None, default=None)
572 572 password = Column("password", String(255), nullable=True, unique=None, default=None)
573 573 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
574 574 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
575 575 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
576 576 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
577 577 _email = Column("email", String(255), nullable=True, unique=None, default=None)
578 578 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
579 579 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
580 580 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
581 581
582 582 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
583 583 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
584 584 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
585 585 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
586 586 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
587 587 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
588 588
589 589 user_log = relationship('UserLog')
590 590 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
591 591
592 592 repositories = relationship('Repository')
593 593 repository_groups = relationship('RepoGroup')
594 594 user_groups = relationship('UserGroup')
595 595
596 596 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
597 597 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
598 598
599 599 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
600 600 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
601 601 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
602 602
603 603 group_member = relationship('UserGroupMember', cascade='all')
604 604
605 605 notifications = relationship('UserNotification', cascade='all')
606 606 # notifications assigned to this user
607 607 user_created_notifications = relationship('Notification', cascade='all')
608 608 # comments created by this user
609 609 user_comments = relationship('ChangesetComment', cascade='all')
610 610 # user profile extra info
611 611 user_emails = relationship('UserEmailMap', cascade='all')
612 612 user_ip_map = relationship('UserIpMap', cascade='all')
613 613 user_auth_tokens = relationship('UserApiKeys', cascade='all')
614 614 user_ssh_keys = relationship('UserSshKeys', cascade='all')
615 615
616 616 # gists
617 617 user_gists = relationship('Gist', cascade='all')
618 618 # user pull requests
619 619 user_pull_requests = relationship('PullRequest', cascade='all')
620 620 # external identities
621 621 external_identities = relationship(
622 622 'ExternalIdentity',
623 623 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
624 624 cascade='all')
625 625 # review rules
626 626 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
627 627
628 628 # artifacts owned
629 629 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
630 630
631 631 # no cascade, set NULL
632 632 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
633 633
634 634 def __unicode__(self):
635 635 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
636 636 self.user_id, self.username)
637 637
638 638 @hybrid_property
639 639 def email(self):
640 640 return self._email
641 641
642 642 @email.setter
643 643 def email(self, val):
644 644 self._email = val.lower() if val else None
645 645
646 646 @hybrid_property
647 647 def first_name(self):
648 648 from rhodecode.lib import helpers as h
649 649 if self.name:
650 650 return h.escape(self.name)
651 651 return self.name
652 652
653 653 @hybrid_property
654 654 def last_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.lastname:
657 657 return h.escape(self.lastname)
658 658 return self.lastname
659 659
660 660 @hybrid_property
661 661 def api_key(self):
662 662 """
663 663 Fetch an auth-token with role ALL connected to this user, if one exists
664 664 """
665 665 user_auth_token = UserApiKeys.query()\
666 666 .filter(UserApiKeys.user_id == self.user_id)\
667 667 .filter(or_(UserApiKeys.expires == -1,
668 668 UserApiKeys.expires >= time.time()))\
669 669 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
670 670 if user_auth_token:
671 671 user_auth_token = user_auth_token.api_key
672 672
673 673 return user_auth_token
674 674
675 675 @api_key.setter
676 676 def api_key(self, val):
677 677 # don't allow setting the API key; this is deprecated for now
678 678 self._api_key = None
679 679
680 680 @property
681 681 def reviewer_pull_requests(self):
682 682 return PullRequestReviewers.query() \
683 683 .options(joinedload(PullRequestReviewers.pull_request)) \
684 684 .filter(PullRequestReviewers.user_id == self.user_id) \
685 685 .all()
686 686
687 687 @property
688 688 def firstname(self):
689 689 # alias for future
690 690 return self.name
691 691
692 692 @property
693 693 def emails(self):
694 694 other = UserEmailMap.query()\
695 695 .filter(UserEmailMap.user == self) \
696 696 .order_by(UserEmailMap.email_id.asc()) \
697 697 .all()
698 698 return [self.email] + [x.email for x in other]
699 699
700 700 def emails_cached(self):
701 701 emails = UserEmailMap.query()\
702 702 .filter(UserEmailMap.user == self) \
703 703 .order_by(UserEmailMap.email_id.asc())
704 704
705 705 emails = emails.options(
706 706 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
707 707 )
708 708
709 709 return [self.email] + [x.email for x in emails]
710 710
711 711 @property
712 712 def auth_tokens(self):
713 713 auth_tokens = self.get_auth_tokens()
714 714 return [x.api_key for x in auth_tokens]
715 715
716 716 def get_auth_tokens(self):
717 717 return UserApiKeys.query()\
718 718 .filter(UserApiKeys.user == self)\
719 719 .order_by(UserApiKeys.user_api_key_id.asc())\
720 720 .all()
721 721
722 722 @LazyProperty
723 723 def feed_token(self):
724 724 return self.get_feed_token()
725 725
726 726 def get_feed_token(self, cache=True):
727 727 feed_tokens = UserApiKeys.query()\
728 728 .filter(UserApiKeys.user == self)\
729 729 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
730 730 if cache:
731 731 feed_tokens = feed_tokens.options(
732 732 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
733 733
734 734 feed_tokens = feed_tokens.all()
735 735 if feed_tokens:
736 736 return feed_tokens[0].api_key
737 737 return 'NO_FEED_TOKEN_AVAILABLE'
738 738
739 739 @LazyProperty
740 740 def artifact_token(self):
741 741 return self.get_artifact_token()
742 742
743 743 def get_artifact_token(self, cache=True):
744 744 artifacts_tokens = UserApiKeys.query()\
745 745 .filter(UserApiKeys.user == self)\
746 746 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
747 747 if cache:
748 748 artifacts_tokens = artifacts_tokens.options(
749 749 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
750 750
751 751 artifacts_tokens = artifacts_tokens.all()
752 752 if artifacts_tokens:
753 753 return artifacts_tokens[0].api_key
754 754 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
755 755
756 756 @classmethod
757 757 def get(cls, user_id, cache=False):
758 758 if not user_id:
759 759 return
760 760
761 761 user = cls.query()
762 762 if cache:
763 763 user = user.options(
764 764 FromCache("sql_cache_short", "get_users_%s" % user_id))
765 765 return user.get(user_id)
766 766
767 767 @classmethod
768 768 def extra_valid_auth_tokens(cls, user, role=None):
769 769 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
770 770 .filter(or_(UserApiKeys.expires == -1,
771 771 UserApiKeys.expires >= time.time()))
772 772 if role:
773 773 tokens = tokens.filter(or_(UserApiKeys.role == role,
774 774 UserApiKeys.role == UserApiKeys.ROLE_ALL))
775 775 return tokens.all()
776 776
777 777 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
778 778 from rhodecode.lib import auth
779 779
780 780 log.debug('Trying to authenticate user: %s via auth-token, '
781 781 'and roles: %s', self, roles)
782 782
783 783 if not auth_token:
784 784 return False
785 785
786 786 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
787 787 tokens_q = UserApiKeys.query()\
788 788 .filter(UserApiKeys.user_id == self.user_id)\
789 789 .filter(or_(UserApiKeys.expires == -1,
790 790 UserApiKeys.expires >= time.time()))
791 791
792 792 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
793 793
794 794 crypto_backend = auth.crypto_backend()
795 795 enc_token_map = {}
796 796 plain_token_map = {}
797 797 for token in tokens_q:
798 798 if token.api_key.startswith(crypto_backend.ENC_PREF):
799 799 enc_token_map[token.api_key] = token
800 800 else:
801 801 plain_token_map[token.api_key] = token
802 802 log.debug(
803 803 'Found %s plain and %s encrypted tokens to check for authentication for this user',
804 804 len(plain_token_map), len(enc_token_map))
805 805
806 806 # plain token match comes first
807 807 match = plain_token_map.get(auth_token)
808 808
809 809 # check encrypted tokens now
810 810 if not match:
811 811 for token_hash, token in enc_token_map.items():
812 812 # NOTE(marcink): this is expensive to calculate, but most secure
813 813 if crypto_backend.hash_check(auth_token, token_hash):
814 814 match = token
815 815 break
816 816
817 817 if match:
818 818 log.debug('Found matching token %s', match)
819 819 if match.repo_id:
820 820 log.debug('Found scope, checking for scope match of token %s', match)
821 821 if match.repo_id == scope_repo_id:
822 822 return True
823 823 else:
824 824 log.debug(
825 825 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
826 826 'and calling scope is:%s, skipping further checks',
827 827 match.repo, scope_repo_id)
828 828 return False
829 829 else:
830 830 return True
831 831
832 832 return False
833 833
834 834 @property
835 835 def ip_addresses(self):
836 836 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
837 837 return [x.ip_addr for x in ret]
838 838
839 839 @property
840 840 def username_and_name(self):
841 841 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
842 842
843 843 @property
844 844 def username_or_name_or_email(self):
845 845 full_name = self.full_name if self.full_name != ' ' else None
846 846 return self.username or full_name or self.email
847 847
848 848 @property
849 849 def full_name(self):
850 850 return '%s %s' % (self.first_name, self.last_name)
851 851
852 852 @property
853 853 def full_name_or_username(self):
854 854 return ('%s %s' % (self.first_name, self.last_name)
855 855 if (self.first_name and self.last_name) else self.username)
856 856
857 857 @property
858 858 def full_contact(self):
859 859 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
860 860
861 861 @property
862 862 def short_contact(self):
863 863 return '%s %s' % (self.first_name, self.last_name)
864 864
865 865 @property
866 866 def is_admin(self):
867 867 return self.admin
868 868
869 869 @property
870 870 def language(self):
871 871 return self.user_data.get('language')
872 872
873 873 def AuthUser(self, **kwargs):
874 874 """
875 875 Returns instance of AuthUser for this user
876 876 """
877 877 from rhodecode.lib.auth import AuthUser
878 878 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
879 879
880 880 @hybrid_property
881 881 def user_data(self):
882 882 if not self._user_data:
883 883 return {}
884 884
885 885 try:
886 886 return json.loads(self._user_data)
887 887 except TypeError:
888 888 return {}
889 889
890 890 @user_data.setter
891 891 def user_data(self, val):
892 892 if not isinstance(val, dict):
893 893 raise Exception('user_data must be dict, got %s' % type(val))
894 894 try:
895 895 self._user_data = json.dumps(val)
896 896 except Exception:
897 897 log.error(traceback.format_exc())
898 898
899 899 @classmethod
900 900 def get_by_username(cls, username, case_insensitive=False,
901 901 cache=False, identity_cache=False):
902 902 session = Session()
903 903
904 904 if case_insensitive:
905 905 q = cls.query().filter(
906 906 func.lower(cls.username) == func.lower(username))
907 907 else:
908 908 q = cls.query().filter(cls.username == username)
909 909
910 910 if cache:
911 911 if identity_cache:
912 912 val = cls.identity_cache(session, 'username', username)
913 913 if val:
914 914 return val
915 915 else:
916 916 cache_key = "get_user_by_name_%s" % _hash_key(username)
917 917 q = q.options(
918 918 FromCache("sql_cache_short", cache_key))
919 919
920 920 return q.scalar()
921 921
922 922 @classmethod
923 923 def get_by_auth_token(cls, auth_token, cache=False):
924 924 q = UserApiKeys.query()\
925 925 .filter(UserApiKeys.api_key == auth_token)\
926 926 .filter(or_(UserApiKeys.expires == -1,
927 927 UserApiKeys.expires >= time.time()))
928 928 if cache:
929 929 q = q.options(
930 930 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
931 931
932 932 match = q.first()
933 933 if match:
934 934 return match.user
935 935
936 936 @classmethod
937 937 def get_by_email(cls, email, case_insensitive=False, cache=False):
938 938
939 939 if case_insensitive:
940 940 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
941 941
942 942 else:
943 943 q = cls.query().filter(cls.email == email)
944 944
945 945 email_key = _hash_key(email)
946 946 if cache:
947 947 q = q.options(
948 948 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
949 949
950 950 ret = q.scalar()
951 951 if ret is None:
952 952 q = UserEmailMap.query()
953 953 # try fetching in alternate email map
954 954 if case_insensitive:
955 955 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
956 956 else:
957 957 q = q.filter(UserEmailMap.email == email)
958 958 q = q.options(joinedload(UserEmailMap.user))
959 959 if cache:
960 960 q = q.options(
961 961 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
962 962 ret = getattr(q.scalar(), 'user', None)
963 963
964 964 return ret
965 965
966 966 @classmethod
967 967 def get_from_cs_author(cls, author):
968 968 """
969 969 Tries to get User objects out of commit author string
970 970
971 971 :param author:
972 972 """
973 973 from rhodecode.lib.helpers import email, author_name
974 974 # Valid email in the attribute passed, see if they're in the system
975 975 _email = email(author)
976 976 if _email:
977 977 user = cls.get_by_email(_email, case_insensitive=True)
978 978 if user:
979 979 return user
980 980 # Maybe we can match by username?
981 981 _author = author_name(author)
982 982 user = cls.get_by_username(_author, case_insensitive=True)
983 983 if user:
984 984 return user
985 985
986 986 def update_userdata(self, **kwargs):
987 987 usr = self
988 988 old = usr.user_data
989 989 old.update(**kwargs)
990 990 usr.user_data = old
991 991 Session().add(usr)
992 992 log.debug('updated userdata with %s', kwargs)
993 993
994 994 def update_lastlogin(self):
995 995 """Update user lastlogin"""
996 996 self.last_login = datetime.datetime.now()
997 997 Session().add(self)
998 998 log.debug('updated user %s lastlogin', self.username)
999 999
1000 1000 def update_password(self, new_password):
1001 1001 from rhodecode.lib.auth import get_crypt_password
1002 1002
1003 1003 self.password = get_crypt_password(new_password)
1004 1004 Session().add(self)
1005 1005
1006 1006 @classmethod
1007 1007 def get_first_super_admin(cls):
1008 1008 user = User.query()\
1009 1009 .filter(User.admin == true()) \
1010 1010 .order_by(User.user_id.asc()) \
1011 1011 .first()
1012 1012
1013 1013 if user is None:
1014 1014 raise Exception('FATAL: Missing administrative account!')
1015 1015 return user
1016 1016
1017 1017 @classmethod
1018 1018 def get_all_super_admins(cls, only_active=False):
1019 1019 """
1020 1020 Returns all admin accounts sorted by username
1021 1021 """
1022 1022 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1023 1023 if only_active:
1024 1024 qry = qry.filter(User.active == true())
1025 1025 return qry.all()
1026 1026
1027 1027 @classmethod
1028 1028 def get_default_user(cls, cache=False, refresh=False):
1029 1029 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1030 1030 if user is None:
1031 1031 raise Exception('FATAL: Missing default account!')
1032 1032 if refresh:
1033 1033 # The default user might be based on outdated state which
1034 1034 # has been loaded from the cache.
1035 1035 # A call to refresh() ensures that the
1036 1036 # latest state from the database is used.
1037 1037 Session().refresh(user)
1038 1038 return user
1039 1039
1040 1040 def _get_default_perms(self, user, suffix=''):
1041 1041 from rhodecode.model.permission import PermissionModel
1042 1042 return PermissionModel().get_default_perms(user.user_perms, suffix)
1043 1043
1044 1044 def get_default_perms(self, suffix=''):
1045 1045 return self._get_default_perms(self, suffix)
1046 1046
1047 1047 def get_api_data(self, include_secrets=False, details='full'):
1048 1048 """
1049 1049 Common function for generating user related data for API
1050 1050
1051 1051 :param include_secrets: By default secrets in the API data will be replaced
1052 1052 by a placeholder value to prevent exposing this data by accident. In case
1053 1053 this data shall be exposed, set this flag to ``True``.
1054 1054
1055 1055 :param details: details can be 'basic|full'; 'basic' gives only a subset of
1056 1056 the available user information that includes user_id, name and emails.
1057 1057 """
1058 1058 user = self
1059 1059 user_data = self.user_data
1060 1060 data = {
1061 1061 'user_id': user.user_id,
1062 1062 'username': user.username,
1063 1063 'firstname': user.name,
1064 1064 'lastname': user.lastname,
1065 1065 'description': user.description,
1066 1066 'email': user.email,
1067 1067 'emails': user.emails,
1068 1068 }
1069 1069 if details == 'basic':
1070 1070 return data
1071 1071
1072 1072 auth_token_length = 40
1073 1073 auth_token_replacement = '*' * auth_token_length
1074 1074
1075 1075 extras = {
1076 1076 'auth_tokens': [auth_token_replacement],
1077 1077 'active': user.active,
1078 1078 'admin': user.admin,
1079 1079 'extern_type': user.extern_type,
1080 1080 'extern_name': user.extern_name,
1081 1081 'last_login': user.last_login,
1082 1082 'last_activity': user.last_activity,
1083 1083 'ip_addresses': user.ip_addresses,
1084 1084 'language': user_data.get('language')
1085 1085 }
1086 1086 data.update(extras)
1087 1087
1088 1088 if include_secrets:
1089 1089 data['auth_tokens'] = user.auth_tokens
1090 1090 return data
1091 1091
1092 1092 def __json__(self):
1093 1093 data = {
1094 1094 'full_name': self.full_name,
1095 1095 'full_name_or_username': self.full_name_or_username,
1096 1096 'short_contact': self.short_contact,
1097 1097 'full_contact': self.full_contact,
1098 1098 }
1099 1099 data.update(self.get_api_data())
1100 1100 return data
1101 1101
1102 1102
1103 1103 class UserApiKeys(Base, BaseModel):
1104 1104 __tablename__ = 'user_api_keys'
1105 1105 __table_args__ = (
1106 1106 Index('uak_api_key_idx', 'api_key'),
1107 1107 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1108 1108 base_table_args
1109 1109 )
1110 1110 __mapper_args__ = {}
1111 1111
1112 1112 # ApiKey role
1113 1113 ROLE_ALL = 'token_role_all'
1114 1114 ROLE_HTTP = 'token_role_http'
1115 1115 ROLE_VCS = 'token_role_vcs'
1116 1116 ROLE_API = 'token_role_api'
1117 1117 ROLE_FEED = 'token_role_feed'
1118 1118 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1119 1119 ROLE_PASSWORD_RESET = 'token_password_reset'
1120 1120
1121 1121 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1122 1122
1123 1123 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1124 1124 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1125 1125 api_key = Column("api_key", String(255), nullable=False, unique=True)
1126 1126 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1127 1127 expires = Column('expires', Float(53), nullable=False)
1128 1128 role = Column('role', String(255), nullable=True)
1129 1129 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1130 1130
1131 1131 # scope columns
1132 1132 repo_id = Column(
1133 1133 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1134 1134 nullable=True, unique=None, default=None)
1135 1135 repo = relationship('Repository', lazy='joined')
1136 1136
1137 1137 repo_group_id = Column(
1138 1138 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1139 1139 nullable=True, unique=None, default=None)
1140 1140 repo_group = relationship('RepoGroup', lazy='joined')
1141 1141
1142 1142 user = relationship('User', lazy='joined')
1143 1143
1144 1144 def __unicode__(self):
1145 1145 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1146 1146
1147 1147 def __json__(self):
1148 1148 data = {
1149 1149 'auth_token': self.api_key,
1150 1150 'role': self.role,
1151 1151 'scope': self.scope_humanized,
1152 1152 'expired': self.expired
1153 1153 }
1154 1154 return data
1155 1155
1156 1156 def get_api_data(self, include_secrets=False):
1157 1157 data = self.__json__()
1158 1158 if include_secrets:
1159 1159 return data
1160 1160 else:
1161 1161 data['auth_token'] = self.token_obfuscated
1162 1162 return data
1163 1163
1164 1164 @hybrid_property
1165 1165 def description_safe(self):
1166 1166 from rhodecode.lib import helpers as h
1167 1167 return h.escape(self.description)
1168 1168
1169 1169 @property
1170 1170 def expired(self):
1171 1171 if self.expires == -1:
1172 1172 return False
1173 1173 return time.time() > self.expires
1174 1174
1175 1175 @classmethod
1176 1176 def _get_role_name(cls, role):
1177 1177 return {
1178 1178 cls.ROLE_ALL: _('all'),
1179 1179 cls.ROLE_HTTP: _('http/web interface'),
1180 1180 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1181 1181 cls.ROLE_API: _('api calls'),
1182 1182 cls.ROLE_FEED: _('feed access'),
1183 1183 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1184 1184 }.get(role, role)
1185 1185
1186 1186 @property
1187 1187 def role_humanized(self):
1188 1188 return self._get_role_name(self.role)
1189 1189
1190 1190 def _get_scope(self):
1191 1191 if self.repo:
1192 1192 return 'Repository: {}'.format(self.repo.repo_name)
1193 1193 if self.repo_group:
1194 1194 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1195 1195 return 'Global'
1196 1196
1197 1197 @property
1198 1198 def scope_humanized(self):
1199 1199 return self._get_scope()
1200 1200
1201 1201 @property
1202 1202 def token_obfuscated(self):
1203 1203 if self.api_key:
1204 1204 return self.api_key[:4] + "****"
1205 1205
1206 1206
1207 1207 class UserEmailMap(Base, BaseModel):
1208 1208 __tablename__ = 'user_email_map'
1209 1209 __table_args__ = (
1210 1210 Index('uem_email_idx', 'email'),
1211 1211 UniqueConstraint('email'),
1212 1212 base_table_args
1213 1213 )
1214 1214 __mapper_args__ = {}
1215 1215
1216 1216 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 1217 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1218 1218 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1219 1219 user = relationship('User', lazy='joined')
1220 1220
1221 1221 @validates('_email')
1222 1222 def validate_email(self, key, email):
1223 1223 # check if this email is not main one
1224 1224 main_email = Session().query(User).filter(User.email == email).scalar()
1225 1225 if main_email is not None:
1226 1226 raise AttributeError('email %s is present in user table' % email)
1227 1227 return email
1228 1228
1229 1229 @hybrid_property
1230 1230 def email(self):
1231 1231 return self._email
1232 1232
1233 1233 @email.setter
1234 1234 def email(self, val):
1235 1235 self._email = val.lower() if val else None
1236 1236
1237 1237
1238 1238 class UserIpMap(Base, BaseModel):
1239 1239 __tablename__ = 'user_ip_map'
1240 1240 __table_args__ = (
1241 1241 UniqueConstraint('user_id', 'ip_addr'),
1242 1242 base_table_args
1243 1243 )
1244 1244 __mapper_args__ = {}
1245 1245
1246 1246 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1247 1247 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1248 1248 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1249 1249 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1250 1250 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1251 1251 user = relationship('User', lazy='joined')
1252 1252
1253 1253 @hybrid_property
1254 1254 def description_safe(self):
1255 1255 from rhodecode.lib import helpers as h
1256 1256 return h.escape(self.description)
1257 1257
1258 1258 @classmethod
1259 1259 def _get_ip_range(cls, ip_addr):
1260 1260 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1261 1261 return [str(net.network_address), str(net.broadcast_address)]
1262 1262
1263 1263 def __json__(self):
1264 1264 return {
1265 1265 'ip_addr': self.ip_addr,
1266 1266 'ip_range': self._get_ip_range(self.ip_addr),
1267 1267 }
1268 1268
1269 1269 def __unicode__(self):
1270 1270 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1271 1271 self.user_id, self.ip_addr)
1272 1272
1273 1273
1274 1274 class UserSshKeys(Base, BaseModel):
1275 1275 __tablename__ = 'user_ssh_keys'
1276 1276 __table_args__ = (
1277 1277 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1278 1278
1279 1279 UniqueConstraint('ssh_key_fingerprint'),
1280 1280
1281 1281 base_table_args
1282 1282 )
1283 1283 __mapper_args__ = {}
1284 1284
1285 1285 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1286 1286 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1287 1287 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1288 1288
1289 1289 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1290 1290
1291 1291 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1292 1292 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1293 1293 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1294 1294
1295 1295 user = relationship('User', lazy='joined')
1296 1296
1297 1297 def __json__(self):
1298 1298 data = {
1299 1299 'ssh_fingerprint': self.ssh_key_fingerprint,
1300 1300 'description': self.description,
1301 1301 'created_on': self.created_on
1302 1302 }
1303 1303 return data
1304 1304
1305 1305 def get_api_data(self):
1306 1306 data = self.__json__()
1307 1307 return data
1308 1308
1309 1309
1310 1310 class UserLog(Base, BaseModel):
1311 1311 __tablename__ = 'user_logs'
1312 1312 __table_args__ = (
1313 1313 base_table_args,
1314 1314 )
1315 1315
1316 1316 VERSION_1 = 'v1'
1317 1317 VERSION_2 = 'v2'
1318 1318 VERSIONS = [VERSION_1, VERSION_2]
1319 1319
1320 1320 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1321 1321 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1322 1322 username = Column("username", String(255), nullable=True, unique=None, default=None)
1323 1323 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1324 1324 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1325 1325 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1326 1326 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1327 1327 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1328 1328
1329 1329 version = Column("version", String(255), nullable=True, default=VERSION_1)
1330 1330 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1331 1331 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1332 1332
1333 1333 def __unicode__(self):
1334 1334 return u"<%s('id:%s:%s')>" % (
1335 1335 self.__class__.__name__, self.repository_name, self.action)
1336 1336
1337 1337 def __json__(self):
1338 1338 return {
1339 1339 'user_id': self.user_id,
1340 1340 'username': self.username,
1341 1341 'repository_id': self.repository_id,
1342 1342 'repository_name': self.repository_name,
1343 1343 'user_ip': self.user_ip,
1344 1344 'action_date': self.action_date,
1345 1345 'action': self.action,
1346 1346 }
1347 1347
1348 1348 @hybrid_property
1349 1349 def entry_id(self):
1350 1350 return self.user_log_id
1351 1351
1352 1352 @property
1353 1353 def action_as_day(self):
1354 1354 return datetime.date(*self.action_date.timetuple()[:3])
1355 1355
1356 1356 user = relationship('User')
1357 1357 repository = relationship('Repository', cascade='')
1358 1358
1359 1359
1360 1360 class UserGroup(Base, BaseModel):
1361 1361 __tablename__ = 'users_groups'
1362 1362 __table_args__ = (
1363 1363 base_table_args,
1364 1364 )
1365 1365
1366 1366 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1367 1367 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1368 1368 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1369 1369 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1370 1370 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1371 1371 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1372 1372 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1373 1373 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1374 1374
1375 1375 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1376 1376 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1377 1377 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1378 1378 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1379 1379 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1380 1380 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1381 1381
1382 1382 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1383 1383 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1384 1384
1385 1385 @classmethod
1386 1386 def _load_group_data(cls, column):
1387 1387 if not column:
1388 1388 return {}
1389 1389
1390 1390 try:
1391 1391 return json.loads(column) or {}
1392 1392 except TypeError:
1393 1393 return {}
1394 1394
1395 1395 @hybrid_property
1396 1396 def description_safe(self):
1397 1397 from rhodecode.lib import helpers as h
1398 1398 return h.escape(self.user_group_description)
1399 1399
1400 1400 @hybrid_property
1401 1401 def group_data(self):
1402 1402 return self._load_group_data(self._group_data)
1403 1403
1404 1404 @group_data.expression
1405 1405 def group_data(self, **kwargs):
1406 1406 return self._group_data
1407 1407
1408 1408 @group_data.setter
1409 1409 def group_data(self, val):
1410 1410 try:
1411 1411 self._group_data = json.dumps(val)
1412 1412 except Exception:
1413 1413 log.error(traceback.format_exc())
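    # Illustrative sketch (not part of the model): how the JSON-backed
    # `group_data` hybrid property behaves; the group name and payload below
    # are assumptions for the example only.
    #
    #     ug = UserGroup.get_by_group_name('devops')   # hypothetical group
    #     ug.group_data = {'extern_type': 'ldap'}      # serialized to JSON on write
    #     ug.group_data                                # -> {'extern_type': 'ldap'}
    #     ug.sync                                      # -> 'ldap' (via _load_sync)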
1414 1414
1415 1415 @classmethod
1416 1416 def _load_sync(cls, group_data):
1417 1417 if group_data:
1418 1418 return group_data.get('extern_type')
1419 1419
1420 1420 @property
1421 1421 def sync(self):
1422 1422 return self._load_sync(self.group_data)
1423 1423
1424 1424 def __unicode__(self):
1425 1425 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1426 1426 self.users_group_id,
1427 1427 self.users_group_name)
1428 1428
1429 1429 @classmethod
1430 1430 def get_by_group_name(cls, group_name, cache=False,
1431 1431 case_insensitive=False):
1432 1432 if case_insensitive:
1433 1433 q = cls.query().filter(func.lower(cls.users_group_name) ==
1434 1434 func.lower(group_name))
1435 1435
1436 1436 else:
1437 1437 q = cls.query().filter(cls.users_group_name == group_name)
1438 1438 if cache:
1439 1439 q = q.options(
1440 1440 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1441 1441 return q.scalar()
1442 1442
1443 1443 @classmethod
1444 1444 def get(cls, user_group_id, cache=False):
1445 1445 if not user_group_id:
1446 1446 return
1447 1447
1448 1448 user_group = cls.query()
1449 1449 if cache:
1450 1450 user_group = user_group.options(
1451 1451 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1452 1452 return user_group.get(user_group_id)
1453 1453
1454 1454 def permissions(self, with_admins=True, with_owner=True,
1455 1455 expand_from_user_groups=False):
1456 1456 """
1457 1457 Permissions for user groups
1458 1458 """
1459 1459 _admin_perm = 'usergroup.admin'
1460 1460
1461 1461 owner_row = []
1462 1462 if with_owner:
1463 1463 usr = AttributeDict(self.user.get_dict())
1464 1464 usr.owner_row = True
1465 1465 usr.permission = _admin_perm
1466 1466 owner_row.append(usr)
1467 1467
1468 1468 super_admin_ids = []
1469 1469 super_admin_rows = []
1470 1470 if with_admins:
1471 1471 for usr in User.get_all_super_admins():
1472 1472 super_admin_ids.append(usr.user_id)
1473 1473 # if this admin is also owner, don't double the record
1474 1474 if usr.user_id == owner_row[0].user_id:
1475 1475 owner_row[0].admin_row = True
1476 1476 else:
1477 1477 usr = AttributeDict(usr.get_dict())
1478 1478 usr.admin_row = True
1479 1479 usr.permission = _admin_perm
1480 1480 super_admin_rows.append(usr)
1481 1481
1482 1482 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1483 1483 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1484 1484 joinedload(UserUserGroupToPerm.user),
1485 1485 joinedload(UserUserGroupToPerm.permission),)
1486 1486
1487 1487         # get owners, admins and their permissions. We re-write the
1488 1488         # sqlalchemy objects into plain AttributeDict copies, because the
1489 1489         # sqlalchemy session keeps a global reference and changing one
1490 1490         # object would propagate to all others. If an admin is also the
1491 1491         # owner, an admin_row change would otherwise affect both entries.
1492 1492 perm_rows = []
1493 1493 for _usr in q.all():
1494 1494 usr = AttributeDict(_usr.user.get_dict())
1495 1495 # if this user is also owner/admin, mark as duplicate record
1496 1496 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1497 1497 usr.duplicate_perm = True
1498 1498 usr.permission = _usr.permission.permission_name
1499 1499 perm_rows.append(usr)
1500 1500
1501 1501         # sort the perm rows: the 'default' user first, then by
1502 1502         # admin/write/read/none permission, alphabetically within
1503 1503         # each permission group
1504 1504 perm_rows = sorted(perm_rows, key=display_user_sort)
1505 1505
1506 1506 user_groups_rows = []
1507 1507 if expand_from_user_groups:
1508 1508 for ug in self.permission_user_groups(with_members=True):
1509 1509 for user_data in ug.members:
1510 1510 user_groups_rows.append(user_data)
1511 1511
1512 1512 return super_admin_rows + owner_row + perm_rows + user_groups_rows
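    # Illustrative sketch of the rows returned by permissions(): each entry is
    # an AttributeDict of user data plus the marker fields set above; the user
    # names and values are made up for the example.
    #
    #     [{'username': 'admin', 'owner_row': True, 'permission': 'usergroup.admin', ...},
    #      {'username': 'bob', 'permission': 'usergroup.read', ...}]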
1513 1513
1514 1514 def permission_user_groups(self, with_members=False):
1515 1515 q = UserGroupUserGroupToPerm.query()\
1516 1516 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1517 1517 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1518 1518 joinedload(UserGroupUserGroupToPerm.target_user_group),
1519 1519 joinedload(UserGroupUserGroupToPerm.permission),)
1520 1520
1521 1521 perm_rows = []
1522 1522 for _user_group in q.all():
1523 1523 entry = AttributeDict(_user_group.user_group.get_dict())
1524 1524 entry.permission = _user_group.permission.permission_name
1525 1525 if with_members:
1526 1526 entry.members = [x.user.get_dict()
1527 1527 for x in _user_group.user_group.members]
1528 1528 perm_rows.append(entry)
1529 1529
1530 1530 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1531 1531 return perm_rows
1532 1532
1533 1533 def _get_default_perms(self, user_group, suffix=''):
1534 1534 from rhodecode.model.permission import PermissionModel
1535 1535 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1536 1536
1537 1537 def get_default_perms(self, suffix=''):
1538 1538 return self._get_default_perms(self, suffix)
1539 1539
1540 1540 def get_api_data(self, with_group_members=True, include_secrets=False):
1541 1541 """
1542 1542 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1543 1543 basically forwarded.
1544 1544
1545 1545 """
1546 1546 user_group = self
1547 1547 data = {
1548 1548 'users_group_id': user_group.users_group_id,
1549 1549 'group_name': user_group.users_group_name,
1550 1550 'group_description': user_group.user_group_description,
1551 1551 'active': user_group.users_group_active,
1552 1552 'owner': user_group.user.username,
1553 1553 'sync': user_group.sync,
1554 1554 'owner_email': user_group.user.email,
1555 1555 }
1556 1556
1557 1557 if with_group_members:
1558 1558 users = []
1559 1559 for user in user_group.members:
1560 1560 user = user.user
1561 1561 users.append(user.get_api_data(include_secrets=include_secrets))
1562 1562 data['users'] = users
1563 1563
1564 1564 return data
1565 1565
1566 1566
1567 1567 class UserGroupMember(Base, BaseModel):
1568 1568 __tablename__ = 'users_groups_members'
1569 1569 __table_args__ = (
1570 1570 base_table_args,
1571 1571 )
1572 1572
1573 1573 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1574 1574 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1575 1575 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1576 1576
1577 1577 user = relationship('User', lazy='joined')
1578 1578 users_group = relationship('UserGroup')
1579 1579
1580 1580 def __init__(self, gr_id='', u_id=''):
1581 1581 self.users_group_id = gr_id
1582 1582 self.user_id = u_id
1583 1583
1584 1584
1585 1585 class RepositoryField(Base, BaseModel):
1586 1586 __tablename__ = 'repositories_fields'
1587 1587 __table_args__ = (
1588 1588 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1589 1589 base_table_args,
1590 1590 )
1591 1591
1592 1592 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1593 1593
1594 1594 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1595 1595 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1596 1596 field_key = Column("field_key", String(250))
1597 1597 field_label = Column("field_label", String(1024), nullable=False)
1598 1598 field_value = Column("field_value", String(10000), nullable=False)
1599 1599 field_desc = Column("field_desc", String(1024), nullable=False)
1600 1600 field_type = Column("field_type", String(255), nullable=False, unique=None)
1601 1601 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1602 1602
1603 1603 repository = relationship('Repository')
1604 1604
1605 1605 @property
1606 1606 def field_key_prefixed(self):
1607 1607         return '%s%s' % (self.PREFIX, self.field_key)
1608 1608
1609 1609 @classmethod
1610 1610 def un_prefix_key(cls, key):
1611 1611 if key.startswith(cls.PREFIX):
1612 1612 return key[len(cls.PREFIX):]
1613 1613 return key
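    # Illustrative sketch: the 'ex_' PREFIX round-trip used for extra repository
    # fields in forms; the field name below is a hypothetical example.
    #
    #     RepositoryField.un_prefix_key('ex_ticket_system')   # -> 'ticket_system'
    #     field = RepositoryField.get_by_key_name('ticket_system', repo)
    #     field.field_key_prefixed                             # -> 'ex_ticket_system'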
1614 1614
1615 1615 @classmethod
1616 1616 def get_by_key_name(cls, key, repo):
1617 1617 row = cls.query()\
1618 1618 .filter(cls.repository == repo)\
1619 1619 .filter(cls.field_key == key).scalar()
1620 1620 return row
1621 1621
1622 1622
1623 1623 class Repository(Base, BaseModel):
1624 1624 __tablename__ = 'repositories'
1625 1625 __table_args__ = (
1626 1626 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1627 1627 base_table_args,
1628 1628 )
1629 1629 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1630 1630 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1631 1631 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1632 1632
1633 1633 STATE_CREATED = 'repo_state_created'
1634 1634 STATE_PENDING = 'repo_state_pending'
1635 1635 STATE_ERROR = 'repo_state_error'
1636 1636
1637 1637 LOCK_AUTOMATIC = 'lock_auto'
1638 1638 LOCK_API = 'lock_api'
1639 1639 LOCK_WEB = 'lock_web'
1640 1640 LOCK_PULL = 'lock_pull'
1641 1641
1642 1642 NAME_SEP = URL_SEP
1643 1643
1644 1644 repo_id = Column(
1645 1645 "repo_id", Integer(), nullable=False, unique=True, default=None,
1646 1646 primary_key=True)
1647 1647 _repo_name = Column(
1648 1648 "repo_name", Text(), nullable=False, default=None)
1649 1649 _repo_name_hash = Column(
1650 1650 "repo_name_hash", String(255), nullable=False, unique=True)
1651 1651 repo_state = Column("repo_state", String(255), nullable=True)
1652 1652
1653 1653 clone_uri = Column(
1654 1654 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1655 1655 default=None)
1656 1656 push_uri = Column(
1657 1657 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1658 1658 default=None)
1659 1659 repo_type = Column(
1660 1660 "repo_type", String(255), nullable=False, unique=False, default=None)
1661 1661 user_id = Column(
1662 1662 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1663 1663 unique=False, default=None)
1664 1664 private = Column(
1665 1665 "private", Boolean(), nullable=True, unique=None, default=None)
1666 1666 archived = Column(
1667 1667 "archived", Boolean(), nullable=True, unique=None, default=None)
1668 1668 enable_statistics = Column(
1669 1669 "statistics", Boolean(), nullable=True, unique=None, default=True)
1670 1670 enable_downloads = Column(
1671 1671 "downloads", Boolean(), nullable=True, unique=None, default=True)
1672 1672 description = Column(
1673 1673 "description", String(10000), nullable=True, unique=None, default=None)
1674 1674 created_on = Column(
1675 1675 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1676 1676 default=datetime.datetime.now)
1677 1677 updated_on = Column(
1678 1678 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1679 1679 default=datetime.datetime.now)
1680 1680 _landing_revision = Column(
1681 1681 "landing_revision", String(255), nullable=False, unique=False,
1682 1682 default=None)
1683 1683 enable_locking = Column(
1684 1684 "enable_locking", Boolean(), nullable=False, unique=None,
1685 1685 default=False)
1686 1686 _locked = Column(
1687 1687 "locked", String(255), nullable=True, unique=False, default=None)
1688 1688 _changeset_cache = Column(
1689 1689 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1690 1690
1691 1691 fork_id = Column(
1692 1692 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1693 1693 nullable=True, unique=False, default=None)
1694 1694 group_id = Column(
1695 1695 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1696 1696 unique=False, default=None)
1697 1697
1698 1698 user = relationship('User', lazy='joined')
1699 1699 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1700 1700 group = relationship('RepoGroup', lazy='joined')
1701 1701 repo_to_perm = relationship(
1702 1702 'UserRepoToPerm', cascade='all',
1703 1703 order_by='UserRepoToPerm.repo_to_perm_id')
1704 1704 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1705 1705 stats = relationship('Statistics', cascade='all', uselist=False)
1706 1706
1707 1707 followers = relationship(
1708 1708 'UserFollowing',
1709 1709 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1710 1710 cascade='all')
1711 1711 extra_fields = relationship(
1712 1712 'RepositoryField', cascade="all, delete-orphan")
1713 1713 logs = relationship('UserLog')
1714 1714 comments = relationship(
1715 1715 'ChangesetComment', cascade="all, delete-orphan")
1716 1716 pull_requests_source = relationship(
1717 1717 'PullRequest',
1718 1718 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1719 1719 cascade="all, delete-orphan")
1720 1720 pull_requests_target = relationship(
1721 1721 'PullRequest',
1722 1722 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1723 1723 cascade="all, delete-orphan")
1724 1724 ui = relationship('RepoRhodeCodeUi', cascade="all")
1725 1725 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1726 1726 integrations = relationship('Integration', cascade="all, delete-orphan")
1727 1727
1728 1728 scoped_tokens = relationship('UserApiKeys', cascade="all")
1729 1729
1730 1730 # no cascade, set NULL
1731 1731 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1732 1732
1733 1733 def __unicode__(self):
1734 1734 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1735 1735 safe_unicode(self.repo_name))
1736 1736
1737 1737 @hybrid_property
1738 1738 def description_safe(self):
1739 1739 from rhodecode.lib import helpers as h
1740 1740 return h.escape(self.description)
1741 1741
1742 1742 @hybrid_property
1743 1743 def landing_rev(self):
1744 1744 # always should return [rev_type, rev]
1745 1745 if self._landing_revision:
1746 1746 _rev_info = self._landing_revision.split(':')
1747 1747 if len(_rev_info) < 2:
1748 1748 _rev_info.insert(0, 'rev')
1749 1749 return [_rev_info[0], _rev_info[1]]
1750 1750 return [None, None]
1751 1751
1752 1752 @landing_rev.setter
1753 1753 def landing_rev(self, val):
1754 1754 if ':' not in val:
1755 1755 raise ValueError('value must be delimited with `:` and consist '
1756 1756 'of <rev_type>:<rev>, got %s instead' % val)
1757 1757 self._landing_revision = val
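    # Illustrative sketch: landing_rev is stored as '<rev_type>:<rev>' and read
    # back as a two-element list; the values below are example assumptions.
    #
    #     repo.landing_rev = 'branch:default'   # stored in _landing_revision
    #     repo.landing_rev                      # -> ['branch', 'default']
    #     repo.landing_rev = 'default'          # raises ValueError (missing ':')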
1758 1758
1759 1759 @hybrid_property
1760 1760 def locked(self):
1761 1761 if self._locked:
1762 1762 user_id, timelocked, reason = self._locked.split(':')
1763 1763 lock_values = int(user_id), timelocked, reason
1764 1764 else:
1765 1765 lock_values = [None, None, None]
1766 1766 return lock_values
1767 1767
1768 1768 @locked.setter
1769 1769 def locked(self, val):
1770 1770 if val and isinstance(val, (list, tuple)):
1771 1771 self._locked = ':'.join(map(str, val))
1772 1772 else:
1773 1773 self._locked = None
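    # Illustrative sketch: the lock is serialized as 'user_id:timestamp:reason'
    # and read back as (user_id, timestamp, reason); example values are made up.
    #
    #     repo.locked = [2, time.time(), Repository.LOCK_API]
    #     repo.locked        # -> (2, '1546300800.0', 'lock_api')
    #     repo.locked = None # clears the lock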
1774 1774
1775 1775 @hybrid_property
1776 1776 def changeset_cache(self):
1777 1777 from rhodecode.lib.vcs.backends.base import EmptyCommit
1778 1778 dummy = EmptyCommit().__json__()
1779 1779 if not self._changeset_cache:
1780 1780 dummy['source_repo_id'] = self.repo_id
1781 1781 return json.loads(json.dumps(dummy))
1782 1782
1783 1783 try:
1784 1784 return json.loads(self._changeset_cache)
1785 1785 except TypeError:
1786 1786 return dummy
1787 1787 except Exception:
1788 1788 log.error(traceback.format_exc())
1789 1789 return dummy
1790 1790
1791 1791 @changeset_cache.setter
1792 1792 def changeset_cache(self, val):
1793 1793 try:
1794 1794 self._changeset_cache = json.dumps(val)
1795 1795 except Exception:
1796 1796 log.error(traceback.format_exc())
1797 1797
1798 1798 @hybrid_property
1799 1799 def repo_name(self):
1800 1800 return self._repo_name
1801 1801
1802 1802 @repo_name.setter
1803 1803 def repo_name(self, value):
1804 1804 self._repo_name = value
1805 1805 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1806 1806
1807 1807 @classmethod
1808 1808 def normalize_repo_name(cls, repo_name):
1809 1809 """
1810 1810 Normalizes os specific repo_name to the format internally stored inside
1811 1811 database using URL_SEP
1812 1812
1813 1813 :param cls:
1814 1814 :param repo_name:
1815 1815 """
1816 1816 return cls.NAME_SEP.join(repo_name.split(os.sep))
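    # Illustrative sketch: on Windows (os.sep == '\\') an OS-specific path is
    # normalized to the URL_SEP form stored in the database.
    #
    #     Repository.normalize_repo_name('group\\subgroup\\repo')
    #     # -> 'group/subgroup/repo'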
1817 1817
1818 1818 @classmethod
1819 1819 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1820 1820 session = Session()
1821 1821 q = session.query(cls).filter(cls.repo_name == repo_name)
1822 1822
1823 1823 if cache:
1824 1824 if identity_cache:
1825 1825 val = cls.identity_cache(session, 'repo_name', repo_name)
1826 1826 if val:
1827 1827 return val
1828 1828 else:
1829 1829 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1830 1830 q = q.options(
1831 1831 FromCache("sql_cache_short", cache_key))
1832 1832
1833 1833 return q.scalar()
1834 1834
1835 1835 @classmethod
1836 1836 def get_by_id_or_repo_name(cls, repoid):
1837 1837 if isinstance(repoid, (int, long)):
1838 1838 try:
1839 1839 repo = cls.get(repoid)
1840 1840 except ValueError:
1841 1841 repo = None
1842 1842 else:
1843 1843 repo = cls.get_by_repo_name(repoid)
1844 1844 return repo
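    # Illustrative sketch: the same lookup works for a numeric id or a repo
    # name; the values below are assumptions.
    #
    #     Repository.get_by_id_or_repo_name(42)               # lookup by id
    #     Repository.get_by_id_or_repo_name('group/my-repo')  # lookup by name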
1845 1845
1846 1846 @classmethod
1847 1847 def get_by_full_path(cls, repo_full_path):
1848 1848 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1849 1849 repo_name = cls.normalize_repo_name(repo_name)
1850 1850 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1851 1851
1852 1852 @classmethod
1853 1853 def get_repo_forks(cls, repo_id):
1854 1854 return cls.query().filter(Repository.fork_id == repo_id)
1855 1855
1856 1856 @classmethod
1857 1857 def base_path(cls):
1858 1858 """
1859 1859         Returns base path where all repos are stored
1860 1860
1861 1861 :param cls:
1862 1862 """
1863 1863 q = Session().query(RhodeCodeUi)\
1864 1864 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1865 1865 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1866 1866 return q.one().ui_value
1867 1867
1868 1868 @classmethod
1869 1869 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1870 1870 case_insensitive=True, archived=False):
1871 1871 q = Repository.query()
1872 1872
1873 1873 if not archived:
1874 1874 q = q.filter(Repository.archived.isnot(true()))
1875 1875
1876 1876 if not isinstance(user_id, Optional):
1877 1877 q = q.filter(Repository.user_id == user_id)
1878 1878
1879 1879 if not isinstance(group_id, Optional):
1880 1880 q = q.filter(Repository.group_id == group_id)
1881 1881
1882 1882 if case_insensitive:
1883 1883 q = q.order_by(func.lower(Repository.repo_name))
1884 1884 else:
1885 1885 q = q.order_by(Repository.repo_name)
1886 1886
1887 1887 return q.all()
1888 1888
1889 1889 @property
1890 1890 def repo_uid(self):
1891 1891 return '_{}'.format(self.repo_id)
1892 1892
1893 1893 @property
1894 1894 def forks(self):
1895 1895 """
1896 1896 Return forks of this repo
1897 1897 """
1898 1898 return Repository.get_repo_forks(self.repo_id)
1899 1899
1900 1900 @property
1901 1901 def parent(self):
1902 1902 """
1903 1903 Returns fork parent
1904 1904 """
1905 1905 return self.fork
1906 1906
1907 1907 @property
1908 1908 def just_name(self):
1909 1909 return self.repo_name.split(self.NAME_SEP)[-1]
1910 1910
1911 1911 @property
1912 1912 def groups_with_parents(self):
1913 1913 groups = []
1914 1914 if self.group is None:
1915 1915 return groups
1916 1916
1917 1917 cur_gr = self.group
1918 1918 groups.insert(0, cur_gr)
1919 1919 while 1:
1920 1920 gr = getattr(cur_gr, 'parent_group', None)
1921 1921 cur_gr = cur_gr.parent_group
1922 1922 if gr is None:
1923 1923 break
1924 1924 groups.insert(0, gr)
1925 1925
1926 1926 return groups
1927 1927
1928 1928 @property
1929 1929 def groups_and_repo(self):
1930 1930 return self.groups_with_parents, self
1931 1931
1932 1932 @LazyProperty
1933 1933 def repo_path(self):
1934 1934 """
1935 1935         Returns the full base path for this repository, i.e. where it
1936 1936         actually exists on the filesystem
1937 1937 """
1938 1938 q = Session().query(RhodeCodeUi).filter(
1939 1939 RhodeCodeUi.ui_key == self.NAME_SEP)
1940 1940 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1941 1941 return q.one().ui_value
1942 1942
1943 1943 @property
1944 1944 def repo_full_path(self):
1945 1945 p = [self.repo_path]
1946 1946 # we need to split the name by / since this is how we store the
1947 1947 # names in the database, but that eventually needs to be converted
1948 1948 # into a valid system path
1949 1949 p += self.repo_name.split(self.NAME_SEP)
1950 1950 return os.path.join(*map(safe_unicode, p))
1951 1951
1952 1952 @property
1953 1953 def cache_keys(self):
1954 1954 """
1955 1955 Returns associated cache keys for that repo
1956 1956 """
1957 1957 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1958 1958 repo_id=self.repo_id)
1959 1959 return CacheKey.query()\
1960 1960 .filter(CacheKey.cache_args == invalidation_namespace)\
1961 1961 .order_by(CacheKey.cache_key)\
1962 1962 .all()
1963 1963
1964 1964 @property
1965 1965 def cached_diffs_relative_dir(self):
1966 1966 """
1967 1967         Return the cached-diffs directory as a path relative to the repository
1968 1968         store, safe to display to users who shouldn't know the absolute
1969 1969         store path
1970 1970 """
1971 1971 return os.path.join(
1972 1972 os.path.dirname(self.repo_name),
1973 1973 self.cached_diffs_dir.split(os.path.sep)[-1])
1974 1974
1975 1975 @property
1976 1976 def cached_diffs_dir(self):
1977 1977 path = self.repo_full_path
1978 1978 return os.path.join(
1979 1979 os.path.dirname(path),
1980 1980 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1981 1981
1982 1982 def cached_diffs(self):
1983 1983 diff_cache_dir = self.cached_diffs_dir
1984 1984 if os.path.isdir(diff_cache_dir):
1985 1985 return os.listdir(diff_cache_dir)
1986 1986 return []
1987 1987
1988 1988 def shadow_repos(self):
1989 1989 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1990 1990 return [
1991 1991 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1992 1992 if x.startswith(shadow_repos_pattern)]
1993 1993
1994 1994 def get_new_name(self, repo_name):
1995 1995 """
1996 1996         returns the new full repository name based on the assigned group and the new name
1997 1997
1998 1998         :param repo_name:
1999 1999 """
2000 2000 path_prefix = self.group.full_path_splitted if self.group else []
2001 2001 return self.NAME_SEP.join(path_prefix + [repo_name])
2002 2002
2003 2003 @property
2004 2004 def _config(self):
2005 2005 """
2006 2006 Returns db based config object.
2007 2007 """
2008 2008 from rhodecode.lib.utils import make_db_config
2009 2009 return make_db_config(clear_session=False, repo=self)
2010 2010
2011 2011 def permissions(self, with_admins=True, with_owner=True,
2012 2012 expand_from_user_groups=False):
2013 2013 """
2014 2014 Permissions for repositories
2015 2015 """
2016 2016 _admin_perm = 'repository.admin'
2017 2017
2018 2018 owner_row = []
2019 2019 if with_owner:
2020 2020 usr = AttributeDict(self.user.get_dict())
2021 2021 usr.owner_row = True
2022 2022 usr.permission = _admin_perm
2023 2023 usr.permission_id = None
2024 2024 owner_row.append(usr)
2025 2025
2026 2026 super_admin_ids = []
2027 2027 super_admin_rows = []
2028 2028 if with_admins:
2029 2029 for usr in User.get_all_super_admins():
2030 2030 super_admin_ids.append(usr.user_id)
2031 2031 # if this admin is also owner, don't double the record
2032 2032 if usr.user_id == owner_row[0].user_id:
2033 2033 owner_row[0].admin_row = True
2034 2034 else:
2035 2035 usr = AttributeDict(usr.get_dict())
2036 2036 usr.admin_row = True
2037 2037 usr.permission = _admin_perm
2038 2038 usr.permission_id = None
2039 2039 super_admin_rows.append(usr)
2040 2040
2041 2041 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2042 2042 q = q.options(joinedload(UserRepoToPerm.repository),
2043 2043 joinedload(UserRepoToPerm.user),
2044 2044 joinedload(UserRepoToPerm.permission),)
2045 2045
2046 2046         # get owners, admins and their permissions. We re-write the
2047 2047         # sqlalchemy objects into plain AttributeDict copies, because the
2048 2048         # sqlalchemy session keeps a global reference and changing one
2049 2049         # object would propagate to all others. If an admin is also the
2050 2050         # owner, an admin_row change would otherwise affect both entries.
2051 2051 perm_rows = []
2052 2052 for _usr in q.all():
2053 2053 usr = AttributeDict(_usr.user.get_dict())
2054 2054 # if this user is also owner/admin, mark as duplicate record
2055 2055 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2056 2056 usr.duplicate_perm = True
2057 2057 # also check if this permission is maybe used by branch_permissions
2058 2058 if _usr.branch_perm_entry:
2059 2059 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2060 2060
2061 2061 usr.permission = _usr.permission.permission_name
2062 2062 usr.permission_id = _usr.repo_to_perm_id
2063 2063 perm_rows.append(usr)
2064 2064
2065 2065         # sort the perm rows: the 'default' user first, then by
2066 2066         # admin/write/read/none permission, alphabetically within
2067 2067         # each permission group
2068 2068 perm_rows = sorted(perm_rows, key=display_user_sort)
2069 2069
2070 2070 user_groups_rows = []
2071 2071 if expand_from_user_groups:
2072 2072 for ug in self.permission_user_groups(with_members=True):
2073 2073 for user_data in ug.members:
2074 2074 user_groups_rows.append(user_data)
2075 2075
2076 2076 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2077 2077
2078 2078 def permission_user_groups(self, with_members=True):
2079 2079 q = UserGroupRepoToPerm.query()\
2080 2080 .filter(UserGroupRepoToPerm.repository == self)
2081 2081 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2082 2082 joinedload(UserGroupRepoToPerm.users_group),
2083 2083 joinedload(UserGroupRepoToPerm.permission),)
2084 2084
2085 2085 perm_rows = []
2086 2086 for _user_group in q.all():
2087 2087 entry = AttributeDict(_user_group.users_group.get_dict())
2088 2088 entry.permission = _user_group.permission.permission_name
2089 2089 if with_members:
2090 2090 entry.members = [x.user.get_dict()
2091 2091 for x in _user_group.users_group.members]
2092 2092 perm_rows.append(entry)
2093 2093
2094 2094 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2095 2095 return perm_rows
2096 2096
2097 2097 def get_api_data(self, include_secrets=False):
2098 2098 """
2099 2099 Common function for generating repo api data
2100 2100
2101 2101 :param include_secrets: See :meth:`User.get_api_data`.
2102 2102
2103 2103 """
2104 2104         # TODO: mikhail: there is an anti-pattern here; we probably need to
2105 2105         # move these methods to the model level.
2106 2106 from rhodecode.model.settings import SettingsModel
2107 2107 from rhodecode.model.repo import RepoModel
2108 2108
2109 2109 repo = self
2110 2110 _user_id, _time, _reason = self.locked
2111 2111
2112 2112 data = {
2113 2113 'repo_id': repo.repo_id,
2114 2114 'repo_name': repo.repo_name,
2115 2115 'repo_type': repo.repo_type,
2116 2116 'clone_uri': repo.clone_uri or '',
2117 2117 'push_uri': repo.push_uri or '',
2118 2118 'url': RepoModel().get_url(self),
2119 2119 'private': repo.private,
2120 2120 'created_on': repo.created_on,
2121 2121 'description': repo.description_safe,
2122 2122 'landing_rev': repo.landing_rev,
2123 2123 'owner': repo.user.username,
2124 2124 'fork_of': repo.fork.repo_name if repo.fork else None,
2125 2125 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2126 2126 'enable_statistics': repo.enable_statistics,
2127 2127 'enable_locking': repo.enable_locking,
2128 2128 'enable_downloads': repo.enable_downloads,
2129 2129 'last_changeset': repo.changeset_cache,
2130 2130 'locked_by': User.get(_user_id).get_api_data(
2131 2131 include_secrets=include_secrets) if _user_id else None,
2132 2132 'locked_date': time_to_datetime(_time) if _time else None,
2133 2133 'lock_reason': _reason if _reason else None,
2134 2134 }
2135 2135
2136 2136 # TODO: mikhail: should be per-repo settings here
2137 2137 rc_config = SettingsModel().get_all_settings()
2138 2138 repository_fields = str2bool(
2139 2139 rc_config.get('rhodecode_repository_fields'))
2140 2140 if repository_fields:
2141 2141 for f in self.extra_fields:
2142 2142 data[f.field_key_prefixed] = f.field_value
2143 2143
2144 2144 return data
2145 2145
2146 2146 @classmethod
2147 2147 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2148 2148 if not lock_time:
2149 2149 lock_time = time.time()
2150 2150 if not lock_reason:
2151 2151 lock_reason = cls.LOCK_AUTOMATIC
2152 2152 repo.locked = [user_id, lock_time, lock_reason]
2153 2153 Session().add(repo)
2154 2154 Session().commit()
2155 2155
2156 2156 @classmethod
2157 2157 def unlock(cls, repo):
2158 2158 repo.locked = None
2159 2159 Session().add(repo)
2160 2160 Session().commit()
2161 2161
2162 2162 @classmethod
2163 2163 def getlock(cls, repo):
2164 2164 return repo.locked
2165 2165
2166 2166 def is_user_lock(self, user_id):
2167 2167         if self.locked[0]:
2168 2168             lock_user_id = safe_int(self.locked[0])
2169 2169 user_id = safe_int(user_id)
2170 2170 # both are ints, and they are equal
2171 2171 return all([lock_user_id, user_id]) and lock_user_id == user_id
2172 2172
2173 2173 return False
2174 2174
2175 2175 def get_locking_state(self, action, user_id, only_when_enabled=True):
2176 2176         """
2177 2177         Checks locking on this repository. If locking is enabled and a lock is
2178 2178         present, returns a tuple of (make_lock, locked, locked_by).
2179 2179         make_lock has 3 states: None (do nothing), True (make a lock) and
2180 2180         False (release the lock). The value is later propagated to the hooks,
2181 2181         which do the actual locking. Think of it as a signal telling the hooks what to do.
2182 2182
2183 2183         """
2184 2184 # TODO: johbo: This is part of the business logic and should be moved
2185 2185 # into the RepositoryModel.
2186 2186
2187 2187 if action not in ('push', 'pull'):
2188 2188 raise ValueError("Invalid action value: %s" % repr(action))
2189 2189
2190 2190 # defines if locked error should be thrown to user
2191 2191 currently_locked = False
2192 2192 # defines if new lock should be made, tri-state
2193 2193 make_lock = None
2194 2194 repo = self
2195 2195 user = User.get(user_id)
2196 2196
2197 2197 lock_info = repo.locked
2198 2198
2199 2199 if repo and (repo.enable_locking or not only_when_enabled):
2200 2200 if action == 'push':
2201 2201                 # check if it's already locked; if it is, compare users
2202 2202 locked_by_user_id = lock_info[0]
2203 2203 if user.user_id == locked_by_user_id:
2204 2204 log.debug(
2205 2205 'Got `push` action from user %s, now unlocking', user)
2206 2206 # unlock if we have push from user who locked
2207 2207 make_lock = False
2208 2208 else:
2209 2209 # we're not the same user who locked, ban with
2210 2210 # code defined in settings (default is 423 HTTP Locked) !
2211 2211 log.debug('Repo %s is currently locked by %s', repo, user)
2212 2212 currently_locked = True
2213 2213 elif action == 'pull':
2214 2214 # [0] user [1] date
2215 2215 if lock_info[0] and lock_info[1]:
2216 2216 log.debug('Repo %s is currently locked by %s', repo, user)
2217 2217 currently_locked = True
2218 2218 else:
2219 2219 log.debug('Setting lock on repo %s by %s', repo, user)
2220 2220 make_lock = True
2221 2221
2222 2222 else:
2223 2223             log.debug('Repository %s does not have locking enabled', repo)
2224 2224
2225 2225 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2226 2226 make_lock, currently_locked, lock_info)
2227 2227
2228 2228 from rhodecode.lib.auth import HasRepoPermissionAny
2229 2229 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2230 2230 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2231 2231 # if we don't have at least write permission we cannot make a lock
2232 2232 log.debug('lock state reset back to FALSE due to lack '
2233 2233                       'of at least write permission')
2234 2234 make_lock = False
2235 2235
2236 2236 return make_lock, currently_locked, lock_info
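    # Illustrative sketch: how a caller might interpret the tri-state make_lock
    # value returned above; the user id below is an assumption.
    #
    #     make_lock, locked, locked_by = repo.get_locking_state('push', user_id=2)
    #     # make_lock True  -> hooks should create a lock
    #     # make_lock False -> hooks should release the lock
    #     # make_lock None  -> leave the lock state untouched
    #     # `locked` tells the caller whether to reject the operation with the
    #     # configured HTTP status (423 Locked by default).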
2237 2237
2238 2238 @property
2239 2239 def last_commit_cache_update_diff(self):
2240 2240 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2241 2241
2242 2242 @property
2243 2243 def last_commit_change(self):
2244 2244 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2245 2245 empty_date = datetime.datetime.fromtimestamp(0)
2246 2246 date_latest = self.changeset_cache.get('date', empty_date)
2247 2247 try:
2248 2248 return parse_datetime(date_latest)
2249 2249 except Exception:
2250 2250 return empty_date
2251 2251
2252 2252 @property
2253 2253 def last_db_change(self):
2254 2254 return self.updated_on
2255 2255
2256 2256 @property
2257 2257 def clone_uri_hidden(self):
2258 2258 clone_uri = self.clone_uri
2259 2259 if clone_uri:
2260 2260 import urlobject
2261 2261 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2262 2262 if url_obj.password:
2263 2263 clone_uri = url_obj.with_password('*****')
2264 2264 return clone_uri
2265 2265
2266 2266 @property
2267 2267 def push_uri_hidden(self):
2268 2268 push_uri = self.push_uri
2269 2269 if push_uri:
2270 2270 import urlobject
2271 2271 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2272 2272 if url_obj.password:
2273 2273 push_uri = url_obj.with_password('*****')
2274 2274 return push_uri
2275 2275
2276 2276 def clone_url(self, **override):
2277 2277 from rhodecode.model.settings import SettingsModel
2278 2278
2279 2279 uri_tmpl = None
2280 2280 if 'with_id' in override:
2281 2281 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2282 2282 del override['with_id']
2283 2283
2284 2284 if 'uri_tmpl' in override:
2285 2285 uri_tmpl = override['uri_tmpl']
2286 2286 del override['uri_tmpl']
2287 2287
2288 2288 ssh = False
2289 2289 if 'ssh' in override:
2290 2290 ssh = True
2291 2291 del override['ssh']
2292 2292
2293 2293 # we didn't override our tmpl from **overrides
2294 2294 request = get_current_request()
2295 2295 if not uri_tmpl:
2296 2296 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2297 2297 rc_config = request.call_context.rc_config
2298 2298 else:
2299 2299 rc_config = SettingsModel().get_all_settings(cache=True)
2300 2300 if ssh:
2301 2301 uri_tmpl = rc_config.get(
2302 2302 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2303 2303 else:
2304 2304 uri_tmpl = rc_config.get(
2305 2305 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2306 2306
2307 2307 return get_clone_url(request=request,
2308 2308 uri_tmpl=uri_tmpl,
2309 2309 repo_name=self.repo_name,
2310 2310 repo_id=self.repo_id, **override)
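    # Illustrative sketch: common clone_url() overrides; hostname and user come
    # from the current request, templates fall back to the class defaults.
    #
    #     repo.clone_url()              # rhodecode_clone_uri_tmpl / DEFAULT_CLONE_URI
    #     repo.clone_url(with_id=True)  # '{scheme}://{user}@{netloc}/_{repoid}'
    #     repo.clone_url(ssh=True)      # SSH template, e.g. DEFAULT_CLONE_URI_SSH
    #     repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # custom template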
2311 2311
2312 2312 def set_state(self, state):
2313 2313 self.repo_state = state
2314 2314 Session().add(self)
2315 2315 #==========================================================================
2316 2316 # SCM PROPERTIES
2317 2317 #==========================================================================
2318 2318
2319 2319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2320 2320 return get_commit_safe(
2321 2321 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2322 2322
2323 2323 def get_changeset(self, rev=None, pre_load=None):
2324 2324 warnings.warn("Use get_commit", DeprecationWarning)
2325 2325 commit_id = None
2326 2326 commit_idx = None
2327 2327 if isinstance(rev, compat.string_types):
2328 2328 commit_id = rev
2329 2329 else:
2330 2330 commit_idx = rev
2331 2331 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2332 2332 pre_load=pre_load)
2333 2333
2334 2334 def get_landing_commit(self):
2335 2335 """
2336 2336 Returns landing commit, or if that doesn't exist returns the tip
2337 2337 """
2338 2338 _rev_type, _rev = self.landing_rev
2339 2339 commit = self.get_commit(_rev)
2340 2340 if isinstance(commit, EmptyCommit):
2341 2341 return self.get_commit()
2342 2342 return commit
2343 2343
2344 2344 def flush_commit_cache(self):
2345 2345         self.update_commit_cache(cs_cache={'raw_id': '0'})
2346 2346 self.update_commit_cache()
2347 2347
2348 2348 def update_commit_cache(self, cs_cache=None, config=None):
2349 2349 """
2350 2350 Update cache of last commit for repository, keys should be::
2351 2351
2352 2352 source_repo_id
2353 2353 short_id
2354 2354 raw_id
2355 2355 revision
2356 2356 parents
2357 2357 message
2358 2358 date
2359 2359 author
2360 2360 updated_on
2361 2361
2362 2362 """
2363 2363 from rhodecode.lib.vcs.backends.base import BaseChangeset
2364 2364 if cs_cache is None:
2365 2365 # use no-cache version here
2366 2366 scm_repo = self.scm_instance(cache=False, config=config)
2367 2367
2368 2368 empty = scm_repo is None or scm_repo.is_empty()
2369 2369 if not empty:
2370 2370 cs_cache = scm_repo.get_commit(
2371 2371 pre_load=["author", "date", "message", "parents", "branch"])
2372 2372 else:
2373 2373 cs_cache = EmptyCommit()
2374 2374
2375 2375 if isinstance(cs_cache, BaseChangeset):
2376 2376 cs_cache = cs_cache.__json__()
2377 2377
2378 2378 def is_outdated(new_cs_cache):
2379 2379 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2380 2380 new_cs_cache['revision'] != self.changeset_cache['revision']):
2381 2381 return True
2382 2382 return False
2383 2383
2384 2384         # check if we maybe already have the latest cached revision
2385 2385 if is_outdated(cs_cache) or not self.changeset_cache:
2386 2386 _default = datetime.datetime.utcnow()
2387 2387             last_change = cs_cache.get('date') or _default
2388 2388             # we check if the currently cached date is newer than the new value;
2389 2389             # if so, we use the current timestamp instead. Imagine an old commit
2390 2390             # pushed 1y ago - we'd otherwise set the last update to 1y ago.
2391 2391             last_change_timestamp = datetime_to_time(last_change)
2392 2392             current_timestamp = datetime_to_time(self.last_commit_change)
2393 2393             if current_timestamp > last_change_timestamp:
2394 2394                 cs_cache['date'] = _default
2395 2395
2396 2396 cs_cache['updated_on'] = time.time()
2397 2397 self.changeset_cache = cs_cache
2398 2398 self.updated_on = last_change
2399 2399 Session().add(self)
2400 2400 Session().commit()
2401 2401
2402 2402 log.debug('updated repo `%s` with new commit cache %s',
2403 2403 self.repo_name, cs_cache)
2404 2404 else:
2405 2405 cs_cache = self.changeset_cache
2406 2406 cs_cache['updated_on'] = time.time()
2407 2407 self.changeset_cache = cs_cache
2408 2408 Session().add(self)
2409 2409 Session().commit()
2410 2410
2411 2411 log.debug('Skipping update_commit_cache for repo:`%s` '
2412 2412 'commit already with latest changes', self.repo_name)
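    # Illustrative sketch: callers typically refresh the cache after push-like
    # events; calling with no cs_cache makes the method read the latest commit
    # from the scm instance itself.
    #
    #     repo.update_commit_cache()   # recompute from the repository
    #     repo.flush_commit_cache()    # force-invalidate, then recompute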
2413 2413
2414 2414 @property
2415 2415 def tip(self):
2416 2416 return self.get_commit('tip')
2417 2417
2418 2418 @property
2419 2419 def author(self):
2420 2420 return self.tip.author
2421 2421
2422 2422 @property
2423 2423 def last_change(self):
2424 2424 return self.scm_instance().last_change
2425 2425
2426 2426 def get_comments(self, revisions=None):
2427 2427 """
2428 2428 Returns comments for this repository grouped by revisions
2429 2429
2430 2430 :param revisions: filter query by revisions only
2431 2431 """
2432 2432 cmts = ChangesetComment.query()\
2433 2433 .filter(ChangesetComment.repo == self)
2434 2434 if revisions:
2435 2435 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2436 2436 grouped = collections.defaultdict(list)
2437 2437 for cmt in cmts.all():
2438 2438 grouped[cmt.revision].append(cmt)
2439 2439 return grouped
2440 2440
2441 2441 def statuses(self, revisions=None):
2442 2442 """
2443 2443 Returns statuses for this repository
2444 2444
2445 2445 :param revisions: list of revisions to get statuses for
2446 2446 """
2447 2447 statuses = ChangesetStatus.query()\
2448 2448 .filter(ChangesetStatus.repo == self)\
2449 2449 .filter(ChangesetStatus.version == 0)
2450 2450
2451 2451 if revisions:
2452 2452 # Try doing the filtering in chunks to avoid hitting limits
2453 2453 size = 500
2454 2454 status_results = []
2455 2455 for chunk in xrange(0, len(revisions), size):
2456 2456 status_results += statuses.filter(
2457 2457 ChangesetStatus.revision.in_(
2458 2458 revisions[chunk: chunk+size])
2459 2459 ).all()
2460 2460 else:
2461 2461 status_results = statuses.all()
2462 2462
2463 2463 grouped = {}
2464 2464
2465 2465 # maybe we have open new pullrequest without a status?
2466 2466 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2467 2467 status_lbl = ChangesetStatus.get_status_lbl(stat)
2468 2468 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2469 2469 for rev in pr.revisions:
2470 2470 pr_id = pr.pull_request_id
2471 2471 pr_repo = pr.target_repo.repo_name
2472 2472 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2473 2473
2474 2474 for stat in status_results:
2475 2475 pr_id = pr_repo = None
2476 2476 if stat.pull_request:
2477 2477 pr_id = stat.pull_request.pull_request_id
2478 2478 pr_repo = stat.pull_request.target_repo.repo_name
2479 2479 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2480 2480 pr_id, pr_repo]
2481 2481 return grouped
2482 2482
2483 2483 # ==========================================================================
2484 2484 # SCM CACHE INSTANCE
2485 2485 # ==========================================================================
2486 2486
2487 2487 def scm_instance(self, **kwargs):
2488 2488 import rhodecode
2489 2489
2490 2490         # Passing a config will not hit the cache; currently this is only
2491 2491         # used for repo2dbmapper
2492 2492 config = kwargs.pop('config', None)
2493 2493 cache = kwargs.pop('cache', None)
2494 2494 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2495 2495 if vcs_full_cache is not None:
2496 2496 # allows override global config
2497 2497 full_cache = vcs_full_cache
2498 2498 else:
2499 2499 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2500 2500         # if cache is NOT defined, use the global default; otherwise we have
2501 2501         # full control over the cache behaviour
2502 2502 if cache is None and full_cache and not config:
2503 2503 log.debug('Initializing pure cached instance for %s', self.repo_path)
2504 2504 return self._get_instance_cached()
2505 2505
2506 2506 # cache here is sent to the "vcs server"
2507 2507 return self._get_instance(cache=bool(cache), config=config)
2508 2508
2509 2509 def _get_instance_cached(self):
2510 2510 from rhodecode.lib import rc_cache
2511 2511
2512 2512 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2513 2513 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2514 2514 repo_id=self.repo_id)
2515 2515 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2516 2516
2517 2517 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2518 2518 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2519 2519 return self._get_instance(repo_state_uid=_cache_state_uid)
2520 2520
2521 2521         # we must use a thread-scoped cache here, because each gevent thread
2522 2522         # needs its own, non-shared connection and cache. We also alter `args`
2523 2523         # so the cache key is unique for every green thread.
2524 2524 inv_context_manager = rc_cache.InvalidationContext(
2525 2525 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2526 2526 thread_scoped=True)
2527 2527 with inv_context_manager as invalidation_context:
2528 2528 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2529 2529 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2530 2530
2531 2531 # re-compute and store cache if we get invalidate signal
2532 2532 if invalidation_context.should_invalidate():
2533 2533 instance = get_instance_cached.refresh(*args)
2534 2534 else:
2535 2535 instance = get_instance_cached(*args)
2536 2536
2537 2537 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2538 2538 return instance
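    # Note on the flow above (descriptive only): InvalidationContext yields a
    # per-repo cache_state_uid; when an invalidation signal is pending, the
    # memoized instance is recomputed via .refresh(), otherwise the cached value
    # keyed by (repo_id, cache_key, cache_state_uid) is returned.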
2539 2539
2540 2540 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2541 2541 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2542 2542 self.repo_type, self.repo_path, cache)
2543 2543 config = config or self._config
2544 2544 custom_wire = {
2545 2545 'cache': cache, # controls the vcs.remote cache
2546 2546 'repo_state_uid': repo_state_uid
2547 2547 }
2548 2548 repo = get_vcs_instance(
2549 2549 repo_path=safe_str(self.repo_full_path),
2550 2550 config=config,
2551 2551 with_wire=custom_wire,
2552 2552 create=False,
2553 2553 _vcs_alias=self.repo_type)
2554 2554 if repo is not None:
2555 2555 repo.count() # cache rebuild
2556 2556 return repo
2557 2557
2558 2558 def get_shadow_repository_path(self, workspace_id):
2559 2559 from rhodecode.lib.vcs.backends.base import BaseRepository
2560 2560 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2561 2561 self.repo_full_path, self.repo_id, workspace_id)
2562 2562 return shadow_repo_path
2563 2563
2564 2564 def __json__(self):
2565 2565 return {'landing_rev': self.landing_rev}
2566 2566
2567 2567 def get_dict(self):
2568 2568
2569 2569 # Since we transformed `repo_name` to a hybrid property, we need to
2570 2570 # keep compatibility with the code which uses `repo_name` field.
2571 2571
2572 2572 result = super(Repository, self).get_dict()
2573 2573 result['repo_name'] = result.pop('_repo_name', None)
2574 2574 return result
2575 2575
2576 2576
2577 2577 class RepoGroup(Base, BaseModel):
2578 2578 __tablename__ = 'groups'
2579 2579 __table_args__ = (
2580 2580 UniqueConstraint('group_name', 'group_parent_id'),
2581 2581 base_table_args,
2582 2582 )
2583 2583 __mapper_args__ = {'order_by': 'group_name'}
2584 2584
2585 2585 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2586 2586
2587 2587 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2588 2588 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2589 2589 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2590 2590 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2591 2591 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2592 2592 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2593 2593 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2594 2594 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2595 2595 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2596 2596 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2597 2597 _changeset_cache = Column(
2598 2598 "changeset_cache", LargeBinary(), nullable=True) # JSON data
2599 2599
2600 2600 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2601 2601 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2602 2602 parent_group = relationship('RepoGroup', remote_side=group_id)
2603 2603 user = relationship('User')
2604 2604 integrations = relationship('Integration', cascade="all, delete-orphan")
2605 2605
2606 2606 # no cascade, set NULL
2607 2607 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2608 2608
2609 2609 def __init__(self, group_name='', parent_group=None):
2610 2610 self.group_name = group_name
2611 2611 self.parent_group = parent_group
2612 2612
2613 2613 def __unicode__(self):
2614 2614 return u"<%s('id:%s:%s')>" % (
2615 2615 self.__class__.__name__, self.group_id, self.group_name)
2616 2616
2617 2617 @hybrid_property
2618 2618 def group_name(self):
2619 2619 return self._group_name
2620 2620
2621 2621 @group_name.setter
2622 2622 def group_name(self, value):
2623 2623 self._group_name = value
2624 2624 self.group_name_hash = self.hash_repo_group_name(value)
2625 2625
2626 2626 @hybrid_property
2627 2627 def changeset_cache(self):
2628 2628 from rhodecode.lib.vcs.backends.base import EmptyCommit
2629 2629 dummy = EmptyCommit().__json__()
2630 2630 if not self._changeset_cache:
2631 2631 dummy['source_repo_id'] = ''
2632 2632 return json.loads(json.dumps(dummy))
2633 2633
2634 2634 try:
2635 2635 return json.loads(self._changeset_cache)
2636 2636 except TypeError:
2637 2637 return dummy
2638 2638 except Exception:
2639 2639 log.error(traceback.format_exc())
2640 2640 return dummy
2641 2641
2642 2642 @changeset_cache.setter
2643 2643 def changeset_cache(self, val):
2644 2644 try:
2645 2645 self._changeset_cache = json.dumps(val)
2646 2646 except Exception:
2647 2647 log.error(traceback.format_exc())
2648 2648
2649 2649 @validates('group_parent_id')
2650 2650 def validate_group_parent_id(self, key, val):
2651 2651 """
2652 2652 Check cycle references for a parent group to self
2653 2653 """
2654 2654 if self.group_id and val:
2655 2655 assert val != self.group_id
2656 2656
2657 2657 return val
2658 2658
2659 2659 @hybrid_property
2660 2660 def description_safe(self):
2661 2661 from rhodecode.lib import helpers as h
2662 2662 return h.escape(self.group_description)
2663 2663
2664 2664 @classmethod
2665 2665 def hash_repo_group_name(cls, repo_group_name):
2666 2666 val = remove_formatting(repo_group_name)
2667 2667 val = safe_str(val).lower()
2668 2668 chars = []
2669 2669 for c in val:
2670 2670 if c not in string.ascii_letters:
2671 2671 c = str(ord(c))
2672 2672 chars.append(c)
2673 2673
2674 2674 return ''.join(chars)
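    # Illustrative sketch: every character outside a-z/A-Z is replaced by its
    # ordinal value, producing a stable ASCII-only name (assuming
    # remove_formatting leaves the text unchanged).
    #
    #     RepoGroup.hash_repo_group_name('My Group/2019')
    #     # -> 'my32group4750484957'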
2675 2675
2676 2676 @classmethod
2677 2677 def _generate_choice(cls, repo_group):
2678 2678 from webhelpers.html import literal as _literal
2679 2679 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2680 2680 return repo_group.group_id, _name(repo_group.full_path_splitted)
2681 2681
2682 2682 @classmethod
2683 2683 def groups_choices(cls, groups=None, show_empty_group=True):
2684 2684 if not groups:
2685 2685 groups = cls.query().all()
2686 2686
2687 2687 repo_groups = []
2688 2688 if show_empty_group:
2689 2689 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2690 2690
2691 2691 repo_groups.extend([cls._generate_choice(x) for x in groups])
2692 2692
2693 2693 repo_groups = sorted(
2694 2694 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2695 2695 return repo_groups
2696 2696
2697 2697 @classmethod
2698 2698 def url_sep(cls):
2699 2699 return URL_SEP
2700 2700
2701 2701 @classmethod
2702 2702 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2703 2703 if case_insensitive:
2704 2704 gr = cls.query().filter(func.lower(cls.group_name)
2705 2705 == func.lower(group_name))
2706 2706 else:
2707 2707 gr = cls.query().filter(cls.group_name == group_name)
2708 2708 if cache:
2709 2709 name_key = _hash_key(group_name)
2710 2710 gr = gr.options(
2711 2711 FromCache("sql_cache_short", "get_group_%s" % name_key))
2712 2712 return gr.scalar()
2713 2713
2714 2714 @classmethod
2715 2715 def get_user_personal_repo_group(cls, user_id):
2716 2716 user = User.get(user_id)
2717 2717 if user.username == User.DEFAULT_USER:
2718 2718 return None
2719 2719
2720 2720 return cls.query()\
2721 2721 .filter(cls.personal == true()) \
2722 2722 .filter(cls.user == user) \
2723 2723 .order_by(cls.group_id.asc()) \
2724 2724 .first()
2725 2725
2726 2726 @classmethod
2727 2727 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2728 2728 case_insensitive=True):
2729 2729 q = RepoGroup.query()
2730 2730
2731 2731 if not isinstance(user_id, Optional):
2732 2732 q = q.filter(RepoGroup.user_id == user_id)
2733 2733
2734 2734 if not isinstance(group_id, Optional):
2735 2735 q = q.filter(RepoGroup.group_parent_id == group_id)
2736 2736
2737 2737 if case_insensitive:
2738 2738 q = q.order_by(func.lower(RepoGroup.group_name))
2739 2739 else:
2740 2740 q = q.order_by(RepoGroup.group_name)
2741 2741 return q.all()
2742 2742
2743 2743 @property
2744 2744     def parents(self, parents_recursion_limit=10):
2745 2745 groups = []
2746 2746 if self.parent_group is None:
2747 2747 return groups
2748 2748 cur_gr = self.parent_group
2749 2749 groups.insert(0, cur_gr)
2750 2750 cnt = 0
2751 2751 while 1:
2752 2752 cnt += 1
2753 2753 gr = getattr(cur_gr, 'parent_group', None)
2754 2754 cur_gr = cur_gr.parent_group
2755 2755 if gr is None:
2756 2756 break
2757 2757 if cnt == parents_recursion_limit:
2758 2758                 # this will prevent accidental infinite loops
2759 2759 log.error('more than %s parents found for group %s, stopping '
2760 2760 'recursive parent fetching', parents_recursion_limit, self)
2761 2761 break
2762 2762
2763 2763 groups.insert(0, gr)
2764 2764 return groups
2765 2765
2766 2766 @property
2767 2767 def last_commit_cache_update_diff(self):
2768 2768 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2769 2769
2770 2770 @property
2771 2771 def last_commit_change(self):
2772 2772 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2773 2773 empty_date = datetime.datetime.fromtimestamp(0)
2774 2774 date_latest = self.changeset_cache.get('date', empty_date)
2775 2775 try:
2776 2776 return parse_datetime(date_latest)
2777 2777 except Exception:
2778 2778 return empty_date
2779 2779
2780 2780 @property
2781 2781 def last_db_change(self):
2782 2782 return self.updated_on
2783 2783
2784 2784 @property
2785 2785 def children(self):
2786 2786 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2787 2787
2788 2788 @property
2789 2789 def name(self):
2790 2790 return self.group_name.split(RepoGroup.url_sep())[-1]
2791 2791
2792 2792 @property
2793 2793 def full_path(self):
2794 2794 return self.group_name
2795 2795
2796 2796 @property
2797 2797 def full_path_splitted(self):
2798 2798 return self.group_name.split(RepoGroup.url_sep())
2799 2799
2800 2800 @property
2801 2801 def repositories(self):
2802 2802 return Repository.query()\
2803 2803 .filter(Repository.group == self)\
2804 2804 .order_by(Repository.repo_name)
2805 2805
2806 2806 @property
2807 2807 def repositories_recursive_count(self):
2808 2808 cnt = self.repositories.count()
2809 2809
2810 2810 def children_count(group):
2811 2811 cnt = 0
2812 2812 for child in group.children:
2813 2813 cnt += child.repositories.count()
2814 2814 cnt += children_count(child)
2815 2815 return cnt
2816 2816
2817 2817 return cnt + children_count(self)
2818 2818
2819 2819 def _recursive_objects(self, include_repos=True, include_groups=True):
2820 2820 all_ = []
2821 2821
2822 2822 def _get_members(root_gr):
2823 2823 if include_repos:
2824 2824 for r in root_gr.repositories:
2825 2825 all_.append(r)
2826 2826 childs = root_gr.children.all()
2827 2827 if childs:
2828 2828 for gr in childs:
2829 2829 if include_groups:
2830 2830 all_.append(gr)
2831 2831 _get_members(gr)
2832 2832
2833 2833 root_group = []
2834 2834 if include_groups:
2835 2835 root_group = [self]
2836 2836
2837 2837 _get_members(self)
2838 2838 return root_group + all_
2839 2839
2840 2840 def recursive_groups_and_repos(self):
2841 2841 """
2842 2842 Recursive return all groups, with repositories in those groups
2843 2843 """
2844 2844 return self._recursive_objects()
2845 2845
2846 2846 def recursive_groups(self):
2847 2847 """
2848 2848 Returns all children groups for this group including children of children
2849 2849 """
2850 2850 return self._recursive_objects(include_repos=False)
2851 2851
2852 2852 def recursive_repos(self):
2853 2853 """
2854 2854 Returns all children repositories for this group
2855 2855 """
2856 2856 return self._recursive_objects(include_groups=False)
2857 2857
2858 2858 def get_new_name(self, group_name):
2859 2859 """
2860 2860 returns new full group name based on parent and new name
2861 2861
2862 2862 :param group_name:
2863 2863 """
2864 2864 path_prefix = (self.parent_group.full_path_splitted if
2865 2865 self.parent_group else [])
2866 2866 return RepoGroup.url_sep().join(path_prefix + [group_name])
2867 2867
2868 2868 def update_commit_cache(self, config=None):
2869 2869 """
2870 2870 Update cache of last changeset for newest repository inside this group, keys should be::
2871 2871
2872 2872 source_repo_id
2873 2873 short_id
2874 2874 raw_id
2875 2875 revision
2876 2876 parents
2877 2877 message
2878 2878 date
2879 2879 author
2880 2880
2881 2881 """
2882 2882 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2883 2883
2884 2884 def repo_groups_and_repos():
2885 2885 all_entries = OrderedDefaultDict(list)
2886 2886
2887 2887 def _get_members(root_gr, pos=0):
2888 2888
2889 2889 for repo in root_gr.repositories:
2890 2890 all_entries[root_gr].append(repo)
2891 2891
2892 2892 # fill in all parent positions
2893 2893 for parent_group in root_gr.parents:
2894 2894 all_entries[parent_group].extend(all_entries[root_gr])
2895 2895
2896 2896 children_groups = root_gr.children.all()
2897 2897 if children_groups:
2898 2898 for cnt, gr in enumerate(children_groups, 1):
2899 2899 _get_members(gr, pos=pos+cnt)
2900 2900
2901 2901 _get_members(root_gr=self)
2902 2902 return all_entries
2903 2903
2904 2904 empty_date = datetime.datetime.fromtimestamp(0)
2905 2905 for repo_group, repos in repo_groups_and_repos().items():
2906 2906
2907 2907 latest_repo_cs_cache = {}
2908 2908 _date_latest = empty_date
2909 2909 for repo in repos:
2910 2910 repo_cs_cache = repo.changeset_cache
2911 2911 date_latest = latest_repo_cs_cache.get('date', empty_date)
2912 2912 date_current = repo_cs_cache.get('date', empty_date)
2913 2913 latest_timestamp = datetime_to_time(parse_datetime(date_latest))
2914 2914 if latest_timestamp < datetime_to_time(parse_datetime(date_current)):
2915 2915 latest_repo_cs_cache = repo_cs_cache
2916 2916 latest_repo_cs_cache['source_repo_id'] = repo.repo_id
2917 2917 _date_latest = parse_datetime(latest_repo_cs_cache['date'])
2918 2918
2919 2919 latest_repo_cs_cache['updated_on'] = time.time()
2920 2920 repo_group.changeset_cache = latest_repo_cs_cache
2921 2921 repo_group.updated_on = _date_latest
2922 2922 Session().add(repo_group)
2923 2923 Session().commit()
2924 2924
2925 2925 log.debug('updated repo group `%s` with new commit cache %s',
2926 2926 repo_group.group_name, latest_repo_cs_cache)
2927 2927
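# Illustrative sketch (not part of the original model): refreshing the cached
# "latest commit" data after repositories in the group changed.
#
#     group.update_commit_cache()
#     # group.changeset_cache now describes the newest commit found in any
#     # repository of the group, and the same data was propagated to every
#     # parent group; group.updated_on matches that commit's date.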
2928 2928 def permissions(self, with_admins=True, with_owner=True,
2929 2929 expand_from_user_groups=False):
2930 2930 """
2931 2931 Permissions for repository groups
2932 2932 """
2933 2933 _admin_perm = 'group.admin'
2934 2934
2935 2935 owner_row = []
2936 2936 if with_owner:
2937 2937 usr = AttributeDict(self.user.get_dict())
2938 2938 usr.owner_row = True
2939 2939 usr.permission = _admin_perm
2940 2940 owner_row.append(usr)
2941 2941
2942 2942 super_admin_ids = []
2943 2943 super_admin_rows = []
2944 2944 if with_admins:
2945 2945 for usr in User.get_all_super_admins():
2946 2946 super_admin_ids.append(usr.user_id)
2947 2947 # if this admin is also owner, don't double the record
2948 2948 if usr.user_id == owner_row[0].user_id:
2949 2949 owner_row[0].admin_row = True
2950 2950 else:
2951 2951 usr = AttributeDict(usr.get_dict())
2952 2952 usr.admin_row = True
2953 2953 usr.permission = _admin_perm
2954 2954 super_admin_rows.append(usr)
2955 2955
2956 2956 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2957 2957 q = q.options(joinedload(UserRepoGroupToPerm.group),
2958 2958 joinedload(UserRepoGroupToPerm.user),
2959 2959 joinedload(UserRepoGroupToPerm.permission),)
2960 2960
2961 2961 # get owners, admins and their permissions. We rewrite the sqlalchemy
2962 2962 # objects into plain AttributeDicts because the sqlalchemy session keeps
2963 2963 # a global reference, so changing one object would propagate to all
2964 2964 # others. Without this, if an admin is also the owner, setting admin_row
2965 2965 # would change both records.
2966 2966 perm_rows = []
2967 2967 for _usr in q.all():
2968 2968 usr = AttributeDict(_usr.user.get_dict())
2969 2969 # if this user is also owner/admin, mark as duplicate record
2970 2970 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2971 2971 usr.duplicate_perm = True
2972 2972 usr.permission = _usr.permission.permission_name
2973 2973 perm_rows.append(usr)
2974 2974
2975 2975 # sort the perm rows: the 'default' user first, then by admin, write,
2976 2976 # read, none permission, and alphabetically within each permission group
2978 2978 perm_rows = sorted(perm_rows, key=display_user_sort)
2979 2979
2980 2980 user_groups_rows = []
2981 2981 if expand_from_user_groups:
2982 2982 for ug in self.permission_user_groups(with_members=True):
2983 2983 for user_data in ug.members:
2984 2984 user_groups_rows.append(user_data)
2985 2985
2986 2986 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2987 2987
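# Illustrative sketch (not part of the original model): inspecting the rows
# returned by permissions(). The printed attributes follow the AttributeDict
# shape built above.
#
#     for row in group.permissions(with_admins=True, with_owner=True):
#         # row.permission is e.g. 'group.admin'; owner_row / admin_row /
#         # duplicate_perm flags may be set as described above.
#         print(row.username, row.permission)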
2988 2988 def permission_user_groups(self, with_members=False):
2989 2989 q = UserGroupRepoGroupToPerm.query()\
2990 2990 .filter(UserGroupRepoGroupToPerm.group == self)
2991 2991 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2992 2992 joinedload(UserGroupRepoGroupToPerm.users_group),
2993 2993 joinedload(UserGroupRepoGroupToPerm.permission),)
2994 2994
2995 2995 perm_rows = []
2996 2996 for _user_group in q.all():
2997 2997 entry = AttributeDict(_user_group.users_group.get_dict())
2998 2998 entry.permission = _user_group.permission.permission_name
2999 2999 if with_members:
3000 3000 entry.members = [x.user.get_dict()
3001 3001 for x in _user_group.users_group.members]
3002 3002 perm_rows.append(entry)
3003 3003
3004 3004 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3005 3005 return perm_rows
3006 3006
3007 3007 def get_api_data(self):
3008 3008 """
3009 3009 Common function for generating api data
3010 3010
3011 3011 """
3012 3012 group = self
3013 3013 data = {
3014 3014 'group_id': group.group_id,
3015 3015 'group_name': group.group_name,
3016 3016 'group_description': group.description_safe,
3017 3017 'parent_group': group.parent_group.group_name if group.parent_group else None,
3018 3018 'repositories': [x.repo_name for x in group.repositories],
3019 3019 'owner': group.user.username,
3020 3020 }
3021 3021 return data
3022 3022
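# Illustrative sketch (not part of the original model): the dictionary shape
# produced by get_api_data() above.
#
#     data = group.get_api_data()
#     # {'group_id': ..., 'group_name': ..., 'group_description': ...,
#     #  'parent_group': ... or None, 'repositories': [...], 'owner': ...}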
3023 3023 def get_dict(self):
3024 3024 # Since we transformed `group_name` to a hybrid property, we need to
3025 3025 # keep compatibility with the code which uses `group_name` field.
3026 3026 result = super(RepoGroup, self).get_dict()
3027 3027 result['group_name'] = result.pop('_group_name', None)
3028 3028 return result
3029 3029
3030 3030
3031 3031 class Permission(Base, BaseModel):
3032 3032 __tablename__ = 'permissions'
3033 3033 __table_args__ = (
3034 3034 Index('p_perm_name_idx', 'permission_name'),
3035 3035 base_table_args,
3036 3036 )
3037 3037
3038 3038 PERMS = [
3039 3039 ('hg.admin', _('RhodeCode Super Administrator')),
3040 3040
3041 3041 ('repository.none', _('Repository no access')),
3042 3042 ('repository.read', _('Repository read access')),
3043 3043 ('repository.write', _('Repository write access')),
3044 3044 ('repository.admin', _('Repository admin access')),
3045 3045
3046 3046 ('group.none', _('Repository group no access')),
3047 3047 ('group.read', _('Repository group read access')),
3048 3048 ('group.write', _('Repository group write access')),
3049 3049 ('group.admin', _('Repository group admin access')),
3050 3050
3051 3051 ('usergroup.none', _('User group no access')),
3052 3052 ('usergroup.read', _('User group read access')),
3053 3053 ('usergroup.write', _('User group write access')),
3054 3054 ('usergroup.admin', _('User group admin access')),
3055 3055
3056 3056 ('branch.none', _('Branch no permissions')),
3057 3057 ('branch.merge', _('Branch access by web merge')),
3058 3058 ('branch.push', _('Branch access by push')),
3059 3059 ('branch.push_force', _('Branch access by push with force')),
3060 3060
3061 3061 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3062 3062 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3063 3063
3064 3064 ('hg.usergroup.create.false', _('User Group creation disabled')),
3065 3065 ('hg.usergroup.create.true', _('User Group creation enabled')),
3066 3066
3067 3067 ('hg.create.none', _('Repository creation disabled')),
3068 3068 ('hg.create.repository', _('Repository creation enabled')),
3069 3069 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3070 3070 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3071 3071
3072 3072 ('hg.fork.none', _('Repository forking disabled')),
3073 3073 ('hg.fork.repository', _('Repository forking enabled')),
3074 3074
3075 3075 ('hg.register.none', _('Registration disabled')),
3076 3076 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3077 3077 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3078 3078
3079 3079 ('hg.password_reset.enabled', _('Password reset enabled')),
3080 3080 ('hg.password_reset.hidden', _('Password reset hidden')),
3081 3081 ('hg.password_reset.disabled', _('Password reset disabled')),
3082 3082
3083 3083 ('hg.extern_activate.manual', _('Manual activation of external account')),
3084 3084 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3085 3085
3086 3086 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3087 3087 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3088 3088 ]
3089 3089
3090 3090 # definition of system default permissions for DEFAULT user, created on
3091 3091 # system setup
3092 3092 DEFAULT_USER_PERMISSIONS = [
3093 3093 # object perms
3094 3094 'repository.read',
3095 3095 'group.read',
3096 3096 'usergroup.read',
3097 3097 # branch; for backward compatibility we keep the same value as before, i.e. force-push allowed
3098 3098 'branch.push_force',
3099 3099 # global
3100 3100 'hg.create.repository',
3101 3101 'hg.repogroup.create.false',
3102 3102 'hg.usergroup.create.false',
3103 3103 'hg.create.write_on_repogroup.true',
3104 3104 'hg.fork.repository',
3105 3105 'hg.register.manual_activate',
3106 3106 'hg.password_reset.enabled',
3107 3107 'hg.extern_activate.auto',
3108 3108 'hg.inherit_default_perms.true',
3109 3109 ]
3110 3110
3111 3111 # Weight defines which permissions are more important.
3112 3112 # The higher the number, the more important the permission.
3114 3114 PERM_WEIGHTS = {
3115 3115 'repository.none': 0,
3116 3116 'repository.read': 1,
3117 3117 'repository.write': 3,
3118 3118 'repository.admin': 4,
3119 3119
3120 3120 'group.none': 0,
3121 3121 'group.read': 1,
3122 3122 'group.write': 3,
3123 3123 'group.admin': 4,
3124 3124
3125 3125 'usergroup.none': 0,
3126 3126 'usergroup.read': 1,
3127 3127 'usergroup.write': 3,
3128 3128 'usergroup.admin': 4,
3129 3129
3130 3130 'branch.none': 0,
3131 3131 'branch.merge': 1,
3132 3132 'branch.push': 3,
3133 3133 'branch.push_force': 4,
3134 3134
3135 3135 'hg.repogroup.create.false': 0,
3136 3136 'hg.repogroup.create.true': 1,
3137 3137
3138 3138 'hg.usergroup.create.false': 0,
3139 3139 'hg.usergroup.create.true': 1,
3140 3140
3141 3141 'hg.fork.none': 0,
3142 3142 'hg.fork.repository': 1,
3143 3143 'hg.create.none': 0,
3144 3144 'hg.create.repository': 1
3145 3145 }
3146 3146
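# Illustrative sketch (not part of the original model): PERM_WEIGHTS lets the
# permission system pick the strongest of several candidate permissions, e.g.
# one granted directly and one inherited from a user group.
#
#     candidates = ['repository.read', 'repository.write']
#     strongest = max(candidates, key=Permission.PERM_WEIGHTS.get)
#     # -> 'repository.write' (weight 3 beats weight 1)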
3147 3147 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3148 3148 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3149 3149 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3150 3150
3151 3151 def __unicode__(self):
3152 3152 return u"<%s('%s:%s')>" % (
3153 3153 self.__class__.__name__, self.permission_id, self.permission_name
3154 3154 )
3155 3155
3156 3156 @classmethod
3157 3157 def get_by_key(cls, key):
3158 3158 return cls.query().filter(cls.permission_name == key).scalar()
3159 3159
3160 3160 @classmethod
3161 3161 def get_default_repo_perms(cls, user_id, repo_id=None):
3162 3162 q = Session().query(UserRepoToPerm, Repository, Permission)\
3163 3163 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3164 3164 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3165 3165 .filter(UserRepoToPerm.user_id == user_id)
3166 3166 if repo_id:
3167 3167 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3168 3168 return q.all()
3169 3169
3170 3170 @classmethod
3171 3171 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3172 3172 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3173 3173 .join(
3174 3174 Permission,
3175 3175 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3176 3176 .join(
3177 3177 UserRepoToPerm,
3178 3178 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3179 3179 .filter(UserRepoToPerm.user_id == user_id)
3180 3180
3181 3181 if repo_id:
3182 3182 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3183 3183 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3184 3184
3185 3185 @classmethod
3186 3186 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3187 3187 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3188 3188 .join(
3189 3189 Permission,
3190 3190 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3191 3191 .join(
3192 3192 Repository,
3193 3193 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3194 3194 .join(
3195 3195 UserGroup,
3196 3196 UserGroupRepoToPerm.users_group_id ==
3197 3197 UserGroup.users_group_id)\
3198 3198 .join(
3199 3199 UserGroupMember,
3200 3200 UserGroupRepoToPerm.users_group_id ==
3201 3201 UserGroupMember.users_group_id)\
3202 3202 .filter(
3203 3203 UserGroupMember.user_id == user_id,
3204 3204 UserGroup.users_group_active == true())
3205 3205 if repo_id:
3206 3206 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3207 3207 return q.all()
3208 3208
3209 3209 @classmethod
3210 3210 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3211 3211 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3212 3212 .join(
3213 3213 Permission,
3214 3214 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3215 3215 .join(
3216 3216 UserGroupRepoToPerm,
3217 3217 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3218 3218 .join(
3219 3219 UserGroup,
3220 3220 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3221 3221 .join(
3222 3222 UserGroupMember,
3223 3223 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3224 3224 .filter(
3225 3225 UserGroupMember.user_id == user_id,
3226 3226 UserGroup.users_group_active == true())
3227 3227
3228 3228 if repo_id:
3229 3229 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3230 3230 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3231 3231
3232 3232 @classmethod
3233 3233 def get_default_group_perms(cls, user_id, repo_group_id=None):
3234 3234 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3235 3235 .join(
3236 3236 Permission,
3237 3237 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3238 3238 .join(
3239 3239 RepoGroup,
3240 3240 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3241 3241 .filter(UserRepoGroupToPerm.user_id == user_id)
3242 3242 if repo_group_id:
3243 3243 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3244 3244 return q.all()
3245 3245
3246 3246 @classmethod
3247 3247 def get_default_group_perms_from_user_group(
3248 3248 cls, user_id, repo_group_id=None):
3249 3249 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3250 3250 .join(
3251 3251 Permission,
3252 3252 UserGroupRepoGroupToPerm.permission_id ==
3253 3253 Permission.permission_id)\
3254 3254 .join(
3255 3255 RepoGroup,
3256 3256 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3257 3257 .join(
3258 3258 UserGroup,
3259 3259 UserGroupRepoGroupToPerm.users_group_id ==
3260 3260 UserGroup.users_group_id)\
3261 3261 .join(
3262 3262 UserGroupMember,
3263 3263 UserGroupRepoGroupToPerm.users_group_id ==
3264 3264 UserGroupMember.users_group_id)\
3265 3265 .filter(
3266 3266 UserGroupMember.user_id == user_id,
3267 3267 UserGroup.users_group_active == true())
3268 3268 if repo_group_id:
3269 3269 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3270 3270 return q.all()
3271 3271
3272 3272 @classmethod
3273 3273 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3274 3274 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3275 3275 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3276 3276 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3277 3277 .filter(UserUserGroupToPerm.user_id == user_id)
3278 3278 if user_group_id:
3279 3279 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3280 3280 return q.all()
3281 3281
3282 3282 @classmethod
3283 3283 def get_default_user_group_perms_from_user_group(
3284 3284 cls, user_id, user_group_id=None):
3285 3285 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3286 3286 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3287 3287 .join(
3288 3288 Permission,
3289 3289 UserGroupUserGroupToPerm.permission_id ==
3290 3290 Permission.permission_id)\
3291 3291 .join(
3292 3292 TargetUserGroup,
3293 3293 UserGroupUserGroupToPerm.target_user_group_id ==
3294 3294 TargetUserGroup.users_group_id)\
3295 3295 .join(
3296 3296 UserGroup,
3297 3297 UserGroupUserGroupToPerm.user_group_id ==
3298 3298 UserGroup.users_group_id)\
3299 3299 .join(
3300 3300 UserGroupMember,
3301 3301 UserGroupUserGroupToPerm.user_group_id ==
3302 3302 UserGroupMember.users_group_id)\
3303 3303 .filter(
3304 3304 UserGroupMember.user_id == user_id,
3305 3305 UserGroup.users_group_active == true())
3306 3306 if user_group_id:
3307 3307 q = q.filter(
3308 3308 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3309 3309
3310 3310 return q.all()
3311 3311
3312 3312
3313 3313 class UserRepoToPerm(Base, BaseModel):
3314 3314 __tablename__ = 'repo_to_perm'
3315 3315 __table_args__ = (
3316 3316 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3317 3317 base_table_args
3318 3318 )
3319 3319
3320 3320 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3321 3321 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3322 3322 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3323 3323 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3324 3324
3325 3325 user = relationship('User')
3326 3326 repository = relationship('Repository')
3327 3327 permission = relationship('Permission')
3328 3328
3329 3329 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3330 3330
3331 3331 @classmethod
3332 3332 def create(cls, user, repository, permission):
3333 3333 n = cls()
3334 3334 n.user = user
3335 3335 n.repository = repository
3336 3336 n.permission = permission
3337 3337 Session().add(n)
3338 3338 return n
3339 3339
3340 3340 def __unicode__(self):
3341 3341 return u'<%s => %s >' % (self.user, self.repository)
3342 3342
3343 3343
3344 3344 class UserUserGroupToPerm(Base, BaseModel):
3345 3345 __tablename__ = 'user_user_group_to_perm'
3346 3346 __table_args__ = (
3347 3347 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3348 3348 base_table_args
3349 3349 )
3350 3350
3351 3351 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3352 3352 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3353 3353 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3354 3354 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3355 3355
3356 3356 user = relationship('User')
3357 3357 user_group = relationship('UserGroup')
3358 3358 permission = relationship('Permission')
3359 3359
3360 3360 @classmethod
3361 3361 def create(cls, user, user_group, permission):
3362 3362 n = cls()
3363 3363 n.user = user
3364 3364 n.user_group = user_group
3365 3365 n.permission = permission
3366 3366 Session().add(n)
3367 3367 return n
3368 3368
3369 3369 def __unicode__(self):
3370 3370 return u'<%s => %s >' % (self.user, self.user_group)
3371 3371
3372 3372
3373 3373 class UserToPerm(Base, BaseModel):
3374 3374 __tablename__ = 'user_to_perm'
3375 3375 __table_args__ = (
3376 3376 UniqueConstraint('user_id', 'permission_id'),
3377 3377 base_table_args
3378 3378 )
3379 3379
3380 3380 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3381 3381 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3382 3382 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3383 3383
3384 3384 user = relationship('User')
3385 3385 permission = relationship('Permission', lazy='joined')
3386 3386
3387 3387 def __unicode__(self):
3388 3388 return u'<%s => %s >' % (self.user, self.permission)
3389 3389
3390 3390
3391 3391 class UserGroupRepoToPerm(Base, BaseModel):
3392 3392 __tablename__ = 'users_group_repo_to_perm'
3393 3393 __table_args__ = (
3394 3394 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3395 3395 base_table_args
3396 3396 )
3397 3397
3398 3398 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3399 3399 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3400 3400 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3401 3401 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3402 3402
3403 3403 users_group = relationship('UserGroup')
3404 3404 permission = relationship('Permission')
3405 3405 repository = relationship('Repository')
3406 3406 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3407 3407
3408 3408 @classmethod
3409 3409 def create(cls, users_group, repository, permission):
3410 3410 n = cls()
3411 3411 n.users_group = users_group
3412 3412 n.repository = repository
3413 3413 n.permission = permission
3414 3414 Session().add(n)
3415 3415 return n
3416 3416
3417 3417 def __unicode__(self):
3418 3418 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3419 3419
3420 3420
3421 3421 class UserGroupUserGroupToPerm(Base, BaseModel):
3422 3422 __tablename__ = 'user_group_user_group_to_perm'
3423 3423 __table_args__ = (
3424 3424 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3425 3425 CheckConstraint('target_user_group_id != user_group_id'),
3426 3426 base_table_args
3427 3427 )
3428 3428
3429 3429 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3430 3430 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3431 3431 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3432 3432 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3433 3433
3434 3434 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3435 3435 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3436 3436 permission = relationship('Permission')
3437 3437
3438 3438 @classmethod
3439 3439 def create(cls, target_user_group, user_group, permission):
3440 3440 n = cls()
3441 3441 n.target_user_group = target_user_group
3442 3442 n.user_group = user_group
3443 3443 n.permission = permission
3444 3444 Session().add(n)
3445 3445 return n
3446 3446
3447 3447 def __unicode__(self):
3448 3448 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3449 3449
3450 3450
3451 3451 class UserGroupToPerm(Base, BaseModel):
3452 3452 __tablename__ = 'users_group_to_perm'
3453 3453 __table_args__ = (
3454 3454 UniqueConstraint('users_group_id', 'permission_id',),
3455 3455 base_table_args
3456 3456 )
3457 3457
3458 3458 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3459 3459 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3460 3460 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3461 3461
3462 3462 users_group = relationship('UserGroup')
3463 3463 permission = relationship('Permission')
3464 3464
3465 3465
3466 3466 class UserRepoGroupToPerm(Base, BaseModel):
3467 3467 __tablename__ = 'user_repo_group_to_perm'
3468 3468 __table_args__ = (
3469 3469 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3470 3470 base_table_args
3471 3471 )
3472 3472
3473 3473 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3474 3474 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3475 3475 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3476 3476 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3477 3477
3478 3478 user = relationship('User')
3479 3479 group = relationship('RepoGroup')
3480 3480 permission = relationship('Permission')
3481 3481
3482 3482 @classmethod
3483 3483 def create(cls, user, repository_group, permission):
3484 3484 n = cls()
3485 3485 n.user = user
3486 3486 n.group = repository_group
3487 3487 n.permission = permission
3488 3488 Session().add(n)
3489 3489 return n
3490 3490
3491 3491
3492 3492 class UserGroupRepoGroupToPerm(Base, BaseModel):
3493 3493 __tablename__ = 'users_group_repo_group_to_perm'
3494 3494 __table_args__ = (
3495 3495 UniqueConstraint('users_group_id', 'group_id'),
3496 3496 base_table_args
3497 3497 )
3498 3498
3499 3499 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3500 3500 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3501 3501 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3502 3502 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3503 3503
3504 3504 users_group = relationship('UserGroup')
3505 3505 permission = relationship('Permission')
3506 3506 group = relationship('RepoGroup')
3507 3507
3508 3508 @classmethod
3509 3509 def create(cls, user_group, repository_group, permission):
3510 3510 n = cls()
3511 3511 n.users_group = user_group
3512 3512 n.group = repository_group
3513 3513 n.permission = permission
3514 3514 Session().add(n)
3515 3515 return n
3516 3516
3517 3517 def __unicode__(self):
3518 3518 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3519 3519
3520 3520
3521 3521 class Statistics(Base, BaseModel):
3522 3522 __tablename__ = 'statistics'
3523 3523 __table_args__ = (
3524 3524 base_table_args
3525 3525 )
3526 3526
3527 3527 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3528 3528 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3529 3529 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3530 3530 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3531 3531 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3532 3532 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3533 3533
3534 3534 repository = relationship('Repository', single_parent=True)
3535 3535
3536 3536
3537 3537 class UserFollowing(Base, BaseModel):
3538 3538 __tablename__ = 'user_followings'
3539 3539 __table_args__ = (
3540 3540 UniqueConstraint('user_id', 'follows_repository_id'),
3541 3541 UniqueConstraint('user_id', 'follows_user_id'),
3542 3542 base_table_args
3543 3543 )
3544 3544
3545 3545 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3546 3546 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3547 3547 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3548 3548 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3549 3549 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3550 3550
3551 3551 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3552 3552
3553 3553 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3554 3554 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3555 3555
3556 3556 @classmethod
3557 3557 def get_repo_followers(cls, repo_id):
3558 3558 return cls.query().filter(cls.follows_repo_id == repo_id)
3559 3559
3560 3560
3561 3561 class CacheKey(Base, BaseModel):
3562 3562 __tablename__ = 'cache_invalidation'
3563 3563 __table_args__ = (
3564 3564 UniqueConstraint('cache_key'),
3565 3565 Index('key_idx', 'cache_key'),
3566 3566 base_table_args,
3567 3567 )
3568 3568
3569 3569 CACHE_TYPE_FEED = 'FEED'
3570 3570
3571 3571 # namespaces used to register process/thread aware caches
3572 3572 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3573 3573 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3574 3574
3575 3575 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3576 3576 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3577 3577 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3578 3578 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3579 3579 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3580 3580
3581 3581 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3582 3582 self.cache_key = cache_key
3583 3583 self.cache_args = cache_args
3584 3584 self.cache_active = False
3585 3585 # first key should be same for all entries, since all workers should share it
3586 3586 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3587 3587
3588 3588 def __unicode__(self):
3589 3589 return u"<%s('%s:%s[%s]')>" % (
3590 3590 self.__class__.__name__,
3591 3591 self.cache_id, self.cache_key, self.cache_active)
3592 3592
3593 3593 def _cache_key_partition(self):
3594 3594 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3595 3595 return prefix, repo_name, suffix
3596 3596
3597 3597 def get_prefix(self):
3598 3598 """
3599 3599 Try to extract prefix from existing cache key. The key could consist
3600 3600 of prefix, repo_name, suffix
3601 3601 """
3602 3602 # this returns prefix, repo_name, suffix
3603 3603 return self._cache_key_partition()[0]
3604 3604
3605 3605 def get_suffix(self):
3606 3606 """
3607 3607 get suffix that might have been used in _get_cache_key to
3608 3608 generate self.cache_key. Only used for informational purposes
3609 3609 in repo_edit.mako.
3610 3610 """
3611 3611 # prefix, repo_name, suffix
3612 3612 return self._cache_key_partition()[2]
3613 3613
3614 3614 @classmethod
3615 3615 def generate_new_state_uid(cls, based_on=None):
3616 3616 if based_on:
3617 3617 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3618 3618 else:
3619 3619 return str(uuid.uuid4())
3620 3620
3621 3621 @classmethod
3622 3622 def delete_all_cache(cls):
3623 3623 """
3624 3624 Delete all cache keys from database.
3625 3625 Should only be run when all instances are down and all entries
3626 3626 thus stale.
3627 3627 """
3628 3628 cls.query().delete()
3629 3629 Session().commit()
3630 3630
3631 3631 @classmethod
3632 3632 def set_invalidate(cls, cache_uid, delete=False):
3633 3633 """
3634 3634 Mark all caches of a repo as invalid in the database.
3635 3635 """
3636 3636
3637 3637 try:
3638 3638 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3639 3639 if delete:
3640 3640 qry.delete()
3641 3641 log.debug('cache objects deleted for cache args %s',
3642 3642 safe_str(cache_uid))
3643 3643 else:
3644 3644 qry.update({"cache_active": False,
3645 3645 "cache_state_uid": cls.generate_new_state_uid()})
3646 3646 log.debug('cache objects marked as invalid for cache args %s',
3647 3647 safe_str(cache_uid))
3648 3648
3649 3649 Session().commit()
3650 3650 except Exception:
3651 3651 log.exception(
3652 3652 'Cache key invalidation failed for cache args %s',
3653 3653 safe_str(cache_uid))
3654 3654 Session().rollback()
3655 3655
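# Illustrative sketch (not part of the original model): invalidating every
# cache entry registered under a repository namespace; repo_id=42 is
# hypothetical.
#
#     namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)
#     CacheKey.set_invalidate(namespace)               # mark entries inactive
#     CacheKey.set_invalidate(namespace, delete=True)  # or delete them outright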
3656 3656 @classmethod
3657 3657 def get_active_cache(cls, cache_key):
3658 3658 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3659 3659 if inv_obj:
3660 3660 return inv_obj
3661 3661 return None
3662 3662
3663 3663 @classmethod
3664 3664 def get_namespace_map(cls, namespace):
3665 3665 return {
3666 3666 x.cache_key: x
3667 3667 for x in cls.query().filter(cls.cache_args == namespace)}
3668 3668
3669 3669
3670 3670 class ChangesetComment(Base, BaseModel):
3671 3671 __tablename__ = 'changeset_comments'
3672 3672 __table_args__ = (
3673 3673 Index('cc_revision_idx', 'revision'),
3674 3674 base_table_args,
3675 3675 )
3676 3676
3677 3677 COMMENT_OUTDATED = u'comment_outdated'
3678 3678 COMMENT_TYPE_NOTE = u'note'
3679 3679 COMMENT_TYPE_TODO = u'todo'
3680 3680 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3681 3681
3682 3682 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3683 3683 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3684 3684 revision = Column('revision', String(40), nullable=True)
3685 3685 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3686 3686 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3687 3687 line_no = Column('line_no', Unicode(10), nullable=True)
3688 3688 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3689 3689 f_path = Column('f_path', Unicode(1000), nullable=True)
3690 3690 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3691 3691 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3692 3692 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3693 3693 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3694 3694 renderer = Column('renderer', Unicode(64), nullable=True)
3695 3695 display_state = Column('display_state', Unicode(128), nullable=True)
3696 3696
3697 3697 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3698 3698 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3699 3699
3700 3700 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3701 3701 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3702 3702
3703 3703 author = relationship('User', lazy='joined')
3704 3704 repo = relationship('Repository')
3705 3705 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
3706 3706 pull_request = relationship('PullRequest', lazy='joined')
3707 3707 pull_request_version = relationship('PullRequestVersion')
3708 3708
3709 3709 @classmethod
3710 3710 def get_users(cls, revision=None, pull_request_id=None):
3711 3711 """
3712 3712 Returns the users associated with this ChangesetComment, i.e. those
3713 3713 who actually commented
3714 3714
3715 3715 :param revision:
3716 3716 :param pull_request_id:
3717 3717 """
3718 3718 q = Session().query(User)\
3719 3719 .join(ChangesetComment.author)
3720 3720 if revision:
3721 3721 q = q.filter(cls.revision == revision)
3722 3722 elif pull_request_id:
3723 3723 q = q.filter(cls.pull_request_id == pull_request_id)
3724 3724 return q.all()
3725 3725
3726 3726 @classmethod
3727 3727 def get_index_from_version(cls, pr_version, versions):
3728 3728 num_versions = [x.pull_request_version_id for x in versions]
3729 3729 try:
3730 3730 return num_versions.index(pr_version) + 1
3731 3731 except (IndexError, ValueError):
3732 3732 return
3733 3733
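# Illustrative sketch (not part of the original model): mapping a comment's
# pull request version id to a 1-based position in the ordered version list.
#
#     versions = pull_request.versions.all()
#     idx = ChangesetComment.get_index_from_version(
#         comment.pull_request_version_id, versions)
#     # idx is 1 for the first version; None when the id is not in the list.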
3734 3734 @property
3735 3735 def outdated(self):
3736 3736 return self.display_state == self.COMMENT_OUTDATED
3737 3737
3738 3738 def outdated_at_version(self, version):
3739 3739 """
3740 3740 Checks if the comment is outdated for the given pull request version
3741 3741 """
3742 3742 return self.outdated and self.pull_request_version_id != version
3743 3743
3744 3744 def older_than_version(self, version):
3745 3745 """
3746 3746 Checks if the comment was made on an earlier version than the given one
3747 3747 """
3748 3748 if version is None:
3749 3749 return self.pull_request_version_id is not None
3750 3750
3751 3751 return self.pull_request_version_id < version
3752 3752
3753 3753 @property
3754 3754 def resolved(self):
3755 3755 return self.resolved_by[0] if self.resolved_by else None
3756 3756
3757 3757 @property
3758 3758 def is_todo(self):
3759 3759 return self.comment_type == self.COMMENT_TYPE_TODO
3760 3760
3761 3761 @property
3762 3762 def is_inline(self):
3763 3763 return self.line_no and self.f_path
3764 3764
3765 3765 def get_index_version(self, versions):
3766 3766 return self.get_index_from_version(
3767 3767 self.pull_request_version_id, versions)
3768 3768
3769 3769 def __repr__(self):
3770 3770 if self.comment_id:
3771 3771 return '<DB:Comment #%s>' % self.comment_id
3772 3772 else:
3773 3773 return '<DB:Comment at %#x>' % id(self)
3774 3774
3775 3775 def get_api_data(self):
3776 3776 comment = self
3777 3777 data = {
3778 3778 'comment_id': comment.comment_id,
3779 3779 'comment_type': comment.comment_type,
3780 3780 'comment_text': comment.text,
3781 3781 'comment_status': comment.status_change,
3782 3782 'comment_f_path': comment.f_path,
3783 3783 'comment_lineno': comment.line_no,
3784 3784 'comment_author': comment.author,
3785 3785 'comment_created_on': comment.created_on,
3786 3786 'comment_resolved_by': self.resolved
3787 3787 }
3788 3788 return data
3789 3789
3790 3790 def __json__(self):
3791 3791 data = dict()
3792 3792 data.update(self.get_api_data())
3793 3793 return data
3794 3794
3795 3795
3796 3796 class ChangesetStatus(Base, BaseModel):
3797 3797 __tablename__ = 'changeset_statuses'
3798 3798 __table_args__ = (
3799 3799 Index('cs_revision_idx', 'revision'),
3800 3800 Index('cs_version_idx', 'version'),
3801 3801 UniqueConstraint('repo_id', 'revision', 'version'),
3802 3802 base_table_args
3803 3803 )
3804 3804
3805 3805 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3806 3806 STATUS_APPROVED = 'approved'
3807 3807 STATUS_REJECTED = 'rejected'
3808 3808 STATUS_UNDER_REVIEW = 'under_review'
3809 3809
3810 3810 STATUSES = [
3811 3811 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3812 3812 (STATUS_APPROVED, _("Approved")),
3813 3813 (STATUS_REJECTED, _("Rejected")),
3814 3814 (STATUS_UNDER_REVIEW, _("Under Review")),
3815 3815 ]
3816 3816
3817 3817 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3818 3818 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3819 3819 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3820 3820 revision = Column('revision', String(40), nullable=False)
3821 3821 status = Column('status', String(128), nullable=False, default=DEFAULT)
3822 3822 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3823 3823 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3824 3824 version = Column('version', Integer(), nullable=False, default=0)
3825 3825 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3826 3826
3827 3827 author = relationship('User', lazy='joined')
3828 3828 repo = relationship('Repository')
3829 3829 comment = relationship('ChangesetComment', lazy='joined')
3830 3830 pull_request = relationship('PullRequest', lazy='joined')
3831 3831
3832 3832 def __unicode__(self):
3833 3833 return u"<%s('%s[v%s]:%s')>" % (
3834 3834 self.__class__.__name__,
3835 3835 self.status, self.version, self.author
3836 3836 )
3837 3837
3838 3838 @classmethod
3839 3839 def get_status_lbl(cls, value):
3840 3840 return dict(cls.STATUSES).get(value)
3841 3841
3842 3842 @property
3843 3843 def status_lbl(self):
3844 3844 return ChangesetStatus.get_status_lbl(self.status)
3845 3845
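# Illustrative sketch (not part of the original model): resolving the human
# readable label for a status value.
#
#     ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
#     # -> the translated 'Approved' label from STATUSES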
3846 3846 def get_api_data(self):
3847 3847 status = self
3848 3848 data = {
3849 3849 'status_id': status.changeset_status_id,
3850 3850 'status': status.status,
3851 3851 }
3852 3852 return data
3853 3853
3854 3854 def __json__(self):
3855 3855 data = dict()
3856 3856 data.update(self.get_api_data())
3857 3857 return data
3858 3858
3859 3859
3860 3860 class _SetState(object):
3861 3861 """
3862 3862 Context manager allowing the state to be changed around sensitive operations
3863 3863 such as pull request update or merge
3864 3864 """
3865 3865
3866 3866 def __init__(self, pull_request, pr_state, back_state=None):
3867 3867 self._pr = pull_request
3868 3868 self._org_state = back_state or pull_request.pull_request_state
3869 3869 self._pr_state = pr_state
3870 3870 self._current_state = None
3871 3871
3872 3872 def __enter__(self):
3873 3873 log.debug('StateLock: entering set state context, setting state to: `%s`',
3874 3874 self._pr_state)
3875 3875 self.set_pr_state(self._pr_state)
3876 3876 return self
3877 3877
3878 3878 def __exit__(self, exc_type, exc_val, exc_tb):
3879 3879 if exc_val is not None:
3880 3880 log.error(''.join(traceback.format_exception(exc_type, exc_val, exc_tb)))
3881 3881 return None
3882 3882
3883 3883 self.set_pr_state(self._org_state)
3884 3884 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3885 3885 self._org_state)

3886 3886 @property
3887 3887 def state(self):
3888 3888 return self._current_state
3889 3889
3890 3890 def set_pr_state(self, pr_state):
3891 3891 try:
3892 3892 self._pr.pull_request_state = pr_state
3893 3893 Session().add(self._pr)
3894 3894 Session().commit()
3895 3895 self._current_state = pr_state
3896 3896 except Exception:
3897 3897 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3898 3898 raise
3899 3899
3900 3900
3901 3901 class _PullRequestBase(BaseModel):
3902 3902 """
3903 3903 Common attributes of pull request and version entries.
3904 3904 """
3905 3905
3906 3906 # .status values
3907 3907 STATUS_NEW = u'new'
3908 3908 STATUS_OPEN = u'open'
3909 3909 STATUS_CLOSED = u'closed'
3910 3910
3911 3911 # available states
3912 3912 STATE_CREATING = u'creating'
3913 3913 STATE_UPDATING = u'updating'
3914 3914 STATE_MERGING = u'merging'
3915 3915 STATE_CREATED = u'created'
3916 3916
3917 3917 title = Column('title', Unicode(255), nullable=True)
3918 3918 description = Column(
3919 3919 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3920 3920 nullable=True)
3921 3921 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3922 3922
3923 3923 # new/open/closed status of pull request (not approve/reject/etc)
3924 3924 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3925 3925 created_on = Column(
3926 3926 'created_on', DateTime(timezone=False), nullable=False,
3927 3927 default=datetime.datetime.now)
3928 3928 updated_on = Column(
3929 3929 'updated_on', DateTime(timezone=False), nullable=False,
3930 3930 default=datetime.datetime.now)
3931 3931
3932 3932 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3933 3933
3934 3934 @declared_attr
3935 3935 def user_id(cls):
3936 3936 return Column(
3937 3937 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3938 3938 unique=None)
3939 3939
3940 3940 # 500 revisions max
3941 3941 _revisions = Column(
3942 3942 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3943 3943
3944 3944 @declared_attr
3945 3945 def source_repo_id(cls):
3946 3946 # TODO: dan: rename column to source_repo_id
3947 3947 return Column(
3948 3948 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3949 3949 nullable=False)
3950 3950
3951 3951 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3952 3952
3953 3953 @hybrid_property
3954 3954 def source_ref(self):
3955 3955 return self._source_ref
3956 3956
3957 3957 @source_ref.setter
3958 3958 def source_ref(self, val):
3959 3959 parts = (val or '').split(':')
3960 3960 if len(parts) != 3:
3961 3961 raise ValueError(
3962 3962 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3963 3963 self._source_ref = safe_unicode(val)
3964 3964
3965 3965 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3966 3966
3967 3967 @hybrid_property
3968 3968 def target_ref(self):
3969 3969 return self._target_ref
3970 3970
3971 3971 @target_ref.setter
3972 3972 def target_ref(self, val):
3973 3973 parts = (val or '').split(':')
3974 3974 if len(parts) != 3:
3975 3975 raise ValueError(
3976 3976 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3977 3977 self._target_ref = safe_unicode(val)
3978 3978
3979 3979 @declared_attr
3980 3980 def target_repo_id(cls):
3981 3981 # TODO: dan: rename column to target_repo_id
3982 3982 return Column(
3983 3983 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3984 3984 nullable=False)
3985 3985
3986 3986 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3987 3987
3988 3988 # TODO: dan: rename column to last_merge_source_rev
3989 3989 _last_merge_source_rev = Column(
3990 3990 'last_merge_org_rev', String(40), nullable=True)
3991 3991 # TODO: dan: rename column to last_merge_target_rev
3992 3992 _last_merge_target_rev = Column(
3993 3993 'last_merge_other_rev', String(40), nullable=True)
3994 3994 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3995 3995 merge_rev = Column('merge_rev', String(40), nullable=True)
3996 3996
3997 3997 reviewer_data = Column(
3998 3998 'reviewer_data_json', MutationObj.as_mutable(
3999 3999 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4000 4000
4001 4001 @property
4002 4002 def reviewer_data_json(self):
4003 4003 return json.dumps(self.reviewer_data)
4004 4004
4005 4005 @hybrid_property
4006 4006 def description_safe(self):
4007 4007 from rhodecode.lib import helpers as h
4008 4008 return h.escape(self.description)
4009 4009
4010 4010 @hybrid_property
4011 4011 def revisions(self):
4012 4012 return self._revisions.split(':') if self._revisions else []
4013 4013
4014 4014 @revisions.setter
4015 4015 def revisions(self, val):
4016 4016 self._revisions = u':'.join(val)
4017 4017
4018 4018 @hybrid_property
4019 4019 def last_merge_status(self):
4020 4020 return safe_int(self._last_merge_status)
4021 4021
4022 4022 @last_merge_status.setter
4023 4023 def last_merge_status(self, val):
4024 4024 self._last_merge_status = val
4025 4025
4026 4026 @declared_attr
4027 4027 def author(cls):
4028 4028 return relationship('User', lazy='joined')
4029 4029
4030 4030 @declared_attr
4031 4031 def source_repo(cls):
4032 4032 return relationship(
4033 4033 'Repository',
4034 4034 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4035 4035
4036 4036 @property
4037 4037 def source_ref_parts(self):
4038 4038 return self.unicode_to_reference(self.source_ref)
4039 4039
4040 4040 @declared_attr
4041 4041 def target_repo(cls):
4042 4042 return relationship(
4043 4043 'Repository',
4044 4044 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4045 4045
4046 4046 @property
4047 4047 def target_ref_parts(self):
4048 4048 return self.unicode_to_reference(self.target_ref)
4049 4049
4050 4050 @property
4051 4051 def shadow_merge_ref(self):
4052 4052 return self.unicode_to_reference(self._shadow_merge_ref)
4053 4053
4054 4054 @shadow_merge_ref.setter
4055 4055 def shadow_merge_ref(self, ref):
4056 4056 self._shadow_merge_ref = self.reference_to_unicode(ref)
4057 4057
4058 4058 @staticmethod
4059 4059 def unicode_to_reference(raw):
4060 4060 """
4061 4061 Convert a unicode (or string) to a reference object.
4062 4062 If unicode evaluates to False it returns None.
4063 4063 """
4064 4064 if raw:
4065 4065 refs = raw.split(':')
4066 4066 return Reference(*refs)
4067 4067 else:
4068 4068 return None
4069 4069
4070 4070 @staticmethod
4071 4071 def reference_to_unicode(ref):
4072 4072 """
4073 4073 Convert a reference object to unicode.
4074 4074 If reference is None it returns None.
4075 4075 """
4076 4076 if ref:
4077 4077 return u':'.join(ref)
4078 4078 else:
4079 4079 return None
4080 4080
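# Illustrative sketch (not part of the original model): the X:Y:Z reference
# format handled by the helpers above. The commit id is hypothetical, and the
# Reference field names are assumed from the vcs layer.
#
#     ref = PullRequest.unicode_to_reference(u'branch:default:deadbeefcafe')
#     # -> Reference(type='branch', name='default', commit_id='deadbeefcafe')
#     PullRequest.reference_to_unicode(ref)
#     # -> u'branch:default:deadbeefcafe'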
4081 4081 def get_api_data(self, with_merge_state=True):
4082 4082 from rhodecode.model.pull_request import PullRequestModel
4083 4083
4084 4084 pull_request = self
4085 4085 if with_merge_state:
4086 4086 merge_status = PullRequestModel().merge_status(pull_request)
4087 4087 merge_state = {
4088 4088 'status': merge_status[0],
4089 4089 'message': safe_unicode(merge_status[1]),
4090 4090 }
4091 4091 else:
4092 4092 merge_state = {'status': 'not_available',
4093 4093 'message': 'not_available'}
4094 4094
4095 4095 merge_data = {
4096 4096 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4097 4097 'reference': (
4098 4098 pull_request.shadow_merge_ref._asdict()
4099 4099 if pull_request.shadow_merge_ref else None),
4100 4100 }
4101 4101
4102 4102 data = {
4103 4103 'pull_request_id': pull_request.pull_request_id,
4104 4104 'url': PullRequestModel().get_url(pull_request),
4105 4105 'title': pull_request.title,
4106 4106 'description': pull_request.description,
4107 4107 'status': pull_request.status,
4108 4108 'state': pull_request.pull_request_state,
4109 4109 'created_on': pull_request.created_on,
4110 4110 'updated_on': pull_request.updated_on,
4111 4111 'commit_ids': pull_request.revisions,
4112 4112 'review_status': pull_request.calculated_review_status(),
4113 4113 'mergeable': merge_state,
4114 4114 'source': {
4115 4115 'clone_url': pull_request.source_repo.clone_url(),
4116 4116 'repository': pull_request.source_repo.repo_name,
4117 4117 'reference': {
4118 4118 'name': pull_request.source_ref_parts.name,
4119 4119 'type': pull_request.source_ref_parts.type,
4120 4120 'commit_id': pull_request.source_ref_parts.commit_id,
4121 4121 },
4122 4122 },
4123 4123 'target': {
4124 4124 'clone_url': pull_request.target_repo.clone_url(),
4125 4125 'repository': pull_request.target_repo.repo_name,
4126 4126 'reference': {
4127 4127 'name': pull_request.target_ref_parts.name,
4128 4128 'type': pull_request.target_ref_parts.type,
4129 4129 'commit_id': pull_request.target_ref_parts.commit_id,
4130 4130 },
4131 4131 },
4132 4132 'merge': merge_data,
4133 4133 'author': pull_request.author.get_api_data(include_secrets=False,
4134 4134 details='basic'),
4135 4135 'reviewers': [
4136 4136 {
4137 4137 'user': reviewer.get_api_data(include_secrets=False,
4138 4138 details='basic'),
4139 4139 'reasons': reasons,
4140 4140 'review_status': st[0][1].status if st else 'not_reviewed',
4141 4141 }
4142 4142 for obj, reviewer, reasons, mandatory, st in
4143 4143 pull_request.reviewers_statuses()
4144 4144 ]
4145 4145 }
4146 4146
4147 4147 return data
4148 4148
4149 4149 def set_state(self, pull_request_state, final_state=None):
4150 4150 """
4151 4151 # temporarily switches from the initial state to the given state, then back.
4152 4152 # the state restored on exit can be overridden by specifying final_state=
4153 4153 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4154 4154 pull_request.merge()
4155 4155
4156 4156 :param pull_request_state:
4157 4157 :param final_state:
4158 4158
4159 4159 """
4160 4160
4161 4161 return _SetState(self, pull_request_state, back_state=final_state)
4162 4162
4163 4163
4164 4164 class PullRequest(Base, _PullRequestBase):
4165 4165 __tablename__ = 'pull_requests'
4166 4166 __table_args__ = (
4167 4167 base_table_args,
4168 4168 )
4169 4169
4170 4170 pull_request_id = Column(
4171 4171 'pull_request_id', Integer(), nullable=False, primary_key=True)
4172 4172
4173 4173 def __repr__(self):
4174 4174 if self.pull_request_id:
4175 4175 return '<DB:PullRequest #%s>' % self.pull_request_id
4176 4176 else:
4177 4177 return '<DB:PullRequest at %#x>' % id(self)
4178 4178
4179 4179 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4180 4180 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4181 4181 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4182 4182 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4183 4183 lazy='dynamic')
4184 4184
4185 4185 @classmethod
4186 4186 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4187 4187 internal_methods=None):
4188 4188
4189 4189 class PullRequestDisplay(object):
4190 4190 """
4191 4191 Special object wrapper for showing PullRequest data via Versions
4192 4192 It mimics PR object as close as possible. This is read only object
4193 4193 just for display
4194 4194 """
4195 4195
4196 4196 def __init__(self, attrs, internal=None):
4197 4197 self.attrs = attrs
4198 4198 # internal attributes have priority over the ones given via attrs
4199 4199 self.internal = internal or ['versions']
4200 4200
4201 4201 def __getattr__(self, item):
4202 4202 if item in self.internal:
4203 4203 return getattr(self, item)
4204 4204 try:
4205 4205 return self.attrs[item]
4206 4206 except KeyError:
4207 4207 raise AttributeError(
4208 4208 '%s object has no attribute %s' % (self, item))
4209 4209
4210 4210 def __repr__(self):
4211 4211 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4212 4212
4213 4213 def versions(self):
4214 4214 return pull_request_obj.versions.order_by(
4215 4215 PullRequestVersion.pull_request_version_id).all()
4216 4216
4217 4217 def is_closed(self):
4218 4218 return pull_request_obj.is_closed()
4219 4219
4220 4220 @property
4221 4221 def pull_request_version_id(self):
4222 4222 return getattr(pull_request_obj, 'pull_request_version_id', None)
4223 4223
4224 4224 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4225 4225
4226 4226 attrs.author = StrictAttributeDict(
4227 4227 pull_request_obj.author.get_api_data())
4228 4228 if pull_request_obj.target_repo:
4229 4229 attrs.target_repo = StrictAttributeDict(
4230 4230 pull_request_obj.target_repo.get_api_data())
4231 4231 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4232 4232
4233 4233 if pull_request_obj.source_repo:
4234 4234 attrs.source_repo = StrictAttributeDict(
4235 4235 pull_request_obj.source_repo.get_api_data())
4236 4236 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4237 4237
4238 4238 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4239 4239 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4240 4240 attrs.revisions = pull_request_obj.revisions
4241 4241
4242 4242 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4243 4243 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4244 4244 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4245 4245
4246 4246 return PullRequestDisplay(attrs, internal=internal_methods)
4247 4247
4248 4248 def is_closed(self):
4249 4249 return self.status == self.STATUS_CLOSED
4250 4250
4251 4251 def __json__(self):
4252 4252 return {
4253 4253 'revisions': self.revisions,
4254 4254 }
4255 4255
4256 4256 def calculated_review_status(self):
4257 4257 from rhodecode.model.changeset_status import ChangesetStatusModel
4258 4258 return ChangesetStatusModel().calculated_review_status(self)
4259 4259
4260 4260 def reviewers_statuses(self):
4261 4261 from rhodecode.model.changeset_status import ChangesetStatusModel
4262 4262 return ChangesetStatusModel().reviewers_statuses(self)
4263 4263
4264 4264 @property
4265 4265 def workspace_id(self):
4266 4266 from rhodecode.model.pull_request import PullRequestModel
4267 4267 return PullRequestModel()._workspace_id(self)
4268 4268
4269 4269 def get_shadow_repo(self):
4270 4270 workspace_id = self.workspace_id
4271 4271 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4272 4272 if os.path.isdir(shadow_repository_path):
4273 4273 vcs_obj = self.target_repo.scm_instance()
4274 4274 return vcs_obj.get_shadow_instance(shadow_repository_path)
4275 4275
4276 4276
4277 4277 class PullRequestVersion(Base, _PullRequestBase):
4278 4278 __tablename__ = 'pull_request_versions'
4279 4279 __table_args__ = (
4280 4280 base_table_args,
4281 4281 )
4282 4282
4283 4283 pull_request_version_id = Column(
4284 4284 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4285 4285 pull_request_id = Column(
4286 4286 'pull_request_id', Integer(),
4287 4287 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4288 4288 pull_request = relationship('PullRequest')
4289 4289
4290 4290 def __repr__(self):
4291 4291 if self.pull_request_version_id:
4292 4292 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4293 4293 else:
4294 4294 return '<DB:PullRequestVersion at %#x>' % id(self)
4295 4295
4296 4296 @property
4297 4297 def reviewers(self):
4298 4298 return self.pull_request.reviewers
4299 4299
4300 4300 @property
4301 4301 def versions(self):
4302 4302 return self.pull_request.versions
4303 4303
4304 4304 def is_closed(self):
4305 4305 # calculate from original
4306 4306 return self.pull_request.status == self.STATUS_CLOSED
4307 4307
4308 4308 def calculated_review_status(self):
4309 4309 return self.pull_request.calculated_review_status()
4310 4310
4311 4311 def reviewers_statuses(self):
4312 4312 return self.pull_request.reviewers_statuses()
4313 4313
4314 4314
4315 4315 class PullRequestReviewers(Base, BaseModel):
4316 4316 __tablename__ = 'pull_request_reviewers'
4317 4317 __table_args__ = (
4318 4318 base_table_args,
4319 4319 )
4320 4320
4321 4321 @hybrid_property
4322 4322 def reasons(self):
4323 4323 if not self._reasons:
4324 4324 return []
4325 4325 return self._reasons
4326 4326
4327 4327 @reasons.setter
4328 4328 def reasons(self, val):
4329 4329 val = val or []
4330 4330 if any(not isinstance(x, compat.string_types) for x in val):
4331 4331 raise Exception('invalid reasons type, must be list of strings')
4332 4332 self._reasons = val
4333 4333
4334 4334 pull_requests_reviewers_id = Column(
4335 4335 'pull_requests_reviewers_id', Integer(), nullable=False,
4336 4336 primary_key=True)
4337 4337 pull_request_id = Column(
4338 4338 "pull_request_id", Integer(),
4339 4339 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4340 4340 user_id = Column(
4341 4341 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4342 4342 _reasons = Column(
4343 4343 'reason', MutationList.as_mutable(
4344 4344 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4345 4345
4346 4346 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4347 4347 user = relationship('User')
4348 4348 pull_request = relationship('PullRequest')
4349 4349
4350 4350 rule_data = Column(
4351 4351 'rule_data_json',
4352 4352 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4353 4353
4354 4354 def rule_user_group_data(self):
4355 4355 """
4356 4356 Returns the voting user group rule data for this reviewer
4357 4357 """
4358 4358
4359 4359 if self.rule_data and 'vote_rule' in self.rule_data:
4360 4360 user_group_data = {}
4361 4361 if 'rule_user_group_entry_id' in self.rule_data:
4362 4362 # means a group with voting rules !
4363 4363 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4364 4364 user_group_data['name'] = self.rule_data['rule_name']
4365 4365 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4366 4366
4367 4367 return user_group_data
4368 4368
4369 4369 def __unicode__(self):
4370 4370 return u"<%s('id:%s')>" % (self.__class__.__name__,
4371 4371 self.pull_requests_reviewers_id)
4372 4372
4373 4373
4374 4374 class Notification(Base, BaseModel):
4375 4375 __tablename__ = 'notifications'
4376 4376 __table_args__ = (
4377 4377 Index('notification_type_idx', 'type'),
4378 4378 base_table_args,
4379 4379 )
4380 4380
4381 4381 TYPE_CHANGESET_COMMENT = u'cs_comment'
4382 4382 TYPE_MESSAGE = u'message'
4383 4383 TYPE_MENTION = u'mention'
4384 4384 TYPE_REGISTRATION = u'registration'
4385 4385 TYPE_PULL_REQUEST = u'pull_request'
4386 4386 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4387 4387
4388 4388 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4389 4389 subject = Column('subject', Unicode(512), nullable=True)
4390 4390 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4391 4391 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4392 4392 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4393 4393 type_ = Column('type', Unicode(255))
4394 4394
4395 4395 created_by_user = relationship('User')
4396 4396 notifications_to_users = relationship('UserNotification', lazy='joined',
4397 4397 cascade="all, delete-orphan")
4398 4398
4399 4399 @property
4400 4400 def recipients(self):
4401 4401 return [x.user for x in UserNotification.query()\
4402 4402 .filter(UserNotification.notification == self)\
4403 4403 .order_by(UserNotification.user_id.asc()).all()]
4404 4404
4405 4405 @classmethod
4406 4406 def create(cls, created_by, subject, body, recipients, type_=None):
4407 4407 if type_ is None:
4408 4408 type_ = Notification.TYPE_MESSAGE
4409 4409
4410 4410 notification = cls()
4411 4411 notification.created_by_user = created_by
4412 4412 notification.subject = subject
4413 4413 notification.body = body
4414 4414 notification.type_ = type_
4415 4415 notification.created_on = datetime.datetime.now()
4416 4416
4417 4417 # For each recipient, link the created notification to their account
4418 4418 for u in recipients:
4419 4419 assoc = UserNotification()
4420 4420 assoc.user_id = u.user_id
4421 4421 assoc.notification = notification
4422 4422
4423 4423 # if created_by is among the recipients, mark their notification
4424 4424 # as read
4425 4425 if u.user_id == created_by.user_id:
4426 4426 assoc.read = True
4427 4427 Session().add(assoc)
4428 4428
4429 4429 Session().add(notification)
4430 4430
4431 4431 return notification
4432 4432
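# Note: a minimal, hypothetical usage sketch of the factory above; `admin`,
# `alice` and `bob` stand in for User instances and are illustrative only:
#
#   notification = Notification.create(
#       created_by=admin,
#       subject=u'Build finished',
#       body=u'The nightly build completed successfully.',
#       recipients=[admin, alice, bob],
#       type_=Notification.TYPE_MESSAGE)
#   Session().commit()
#
# Each recipient gets a UserNotification row; because `admin` is both the
# creator and a recipient, their copy is created with read=True.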
4433 4433
4434 4434 class UserNotification(Base, BaseModel):
4435 4435 __tablename__ = 'user_to_notification'
4436 4436 __table_args__ = (
4437 4437 UniqueConstraint('user_id', 'notification_id'),
4438 4438 base_table_args
4439 4439 )
4440 4440
4441 4441 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4442 4442 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4443 4443 read = Column('read', Boolean, default=False)
4444 4444 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4445 4445
4446 4446 user = relationship('User', lazy="joined")
4447 4447 notification = relationship('Notification', lazy="joined",
4448 4448 order_by=lambda: Notification.created_on.desc(),)
4449 4449
4450 4450 def mark_as_read(self):
4451 4451 self.read = True
4452 4452 Session().add(self)
4453 4453
4454 4454
4455 4455 class Gist(Base, BaseModel):
4456 4456 __tablename__ = 'gists'
4457 4457 __table_args__ = (
4458 4458 Index('g_gist_access_id_idx', 'gist_access_id'),
4459 4459 Index('g_created_on_idx', 'created_on'),
4460 4460 base_table_args
4461 4461 )
4462 4462
4463 4463 GIST_PUBLIC = u'public'
4464 4464 GIST_PRIVATE = u'private'
4465 4465 DEFAULT_FILENAME = u'gistfile1.txt'
4466 4466
4467 4467 ACL_LEVEL_PUBLIC = u'acl_public'
4468 4468 ACL_LEVEL_PRIVATE = u'acl_private'
4469 4469
4470 4470 gist_id = Column('gist_id', Integer(), primary_key=True)
4471 4471 gist_access_id = Column('gist_access_id', Unicode(250))
4472 4472 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4473 4473 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4474 4474 gist_expires = Column('gist_expires', Float(53), nullable=False)
4475 4475 gist_type = Column('gist_type', Unicode(128), nullable=False)
4476 4476 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4477 4477 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4478 4478 acl_level = Column('acl_level', Unicode(128), nullable=True)
4479 4479
4480 4480 owner = relationship('User')
4481 4481
4482 4482 def __repr__(self):
4483 4483 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4484 4484
4485 4485 @hybrid_property
4486 4486 def description_safe(self):
4487 4487 from rhodecode.lib import helpers as h
4488 4488 return h.escape(self.gist_description)
4489 4489
4490 4490 @classmethod
4491 4491 def get_or_404(cls, id_):
4492 4492 from pyramid.httpexceptions import HTTPNotFound
4493 4493
4494 4494 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4495 4495 if not res:
4496 4496 raise HTTPNotFound()
4497 4497 return res
4498 4498
4499 4499 @classmethod
4500 4500 def get_by_access_id(cls, gist_access_id):
4501 4501 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4502 4502
4503 4503 def gist_url(self):
4504 4504 from rhodecode.model.gist import GistModel
4505 4505 return GistModel().get_url(self)
4506 4506
4507 4507 @classmethod
4508 4508 def base_path(cls):
4509 4509 """
4510 4510 Returns the base path where all gists are stored
4511 4511
4512 4512 :param cls:
4513 4513 """
4514 4514 from rhodecode.model.gist import GIST_STORE_LOC
4515 4515 q = Session().query(RhodeCodeUi)\
4516 4516 .filter(RhodeCodeUi.ui_key == URL_SEP)
4517 4517 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4518 4518 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4519 4519
4520 4520 def get_api_data(self):
4521 4521 """
4522 4522 Common function for generating gist related data for API
4523 4523 """
4524 4524 gist = self
4525 4525 data = {
4526 4526 'gist_id': gist.gist_id,
4527 4527 'type': gist.gist_type,
4528 4528 'access_id': gist.gist_access_id,
4529 4529 'description': gist.gist_description,
4530 4530 'url': gist.gist_url(),
4531 4531 'expires': gist.gist_expires,
4532 4532 'created_on': gist.created_on,
4533 4533 'modified_at': gist.modified_at,
4534 4534 'content': None,
4535 4535 'acl_level': gist.acl_level,
4536 4536 }
4537 4537 return data
4538 4538
4539 4539 def __json__(self):
4540 4540 data = dict(
4541 4541 )
4542 4542 data.update(self.get_api_data())
4543 4543 return data
4544 4544 # SCM functions
4545 4545
4546 4546 def scm_instance(self, **kwargs):
4547 4547 """
4548 4548 Get an instance of VCS Repository
4549 4549
4550 4550 :param kwargs:
4551 4551 """
4552 4552 from rhodecode.model.gist import GistModel
4553 4553 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4554 4554 return get_vcs_instance(
4555 4555 repo_path=safe_str(full_repo_path), create=False,
4556 4556 _vcs_alias=GistModel.vcs_backend)
4557 4557
4558 4558
4559 4559 class ExternalIdentity(Base, BaseModel):
4560 4560 __tablename__ = 'external_identities'
4561 4561 __table_args__ = (
4562 4562 Index('local_user_id_idx', 'local_user_id'),
4563 4563 Index('external_id_idx', 'external_id'),
4564 4564 base_table_args
4565 4565 )
4566 4566
4567 4567 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4568 4568 external_username = Column('external_username', Unicode(1024), default=u'')
4569 4569 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4570 4570 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4571 4571 access_token = Column('access_token', String(1024), default=u'')
4572 4572 alt_token = Column('alt_token', String(1024), default=u'')
4573 4573 token_secret = Column('token_secret', String(1024), default=u'')
4574 4574
4575 4575 @classmethod
4576 4576 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4577 4577 """
4578 4578 Returns ExternalIdentity instance based on search params
4579 4579
4580 4580 :param external_id:
4581 4581 :param provider_name:
4582 4582 :return: ExternalIdentity
4583 4583 """
4584 4584 query = cls.query()
4585 4585 query = query.filter(cls.external_id == external_id)
4586 4586 query = query.filter(cls.provider_name == provider_name)
4587 4587 if local_user_id:
4588 4588 query = query.filter(cls.local_user_id == local_user_id)
4589 4589 return query.first()
4590 4590
4591 4591 @classmethod
4592 4592 def user_by_external_id_and_provider(cls, external_id, provider_name):
4593 4593 """
4594 4594 Returns User instance based on search params
4595 4595
4596 4596 :param external_id:
4597 4597 :param provider_name:
4598 4598 :return: User
4599 4599 """
4600 4600 query = User.query()
4601 4601 query = query.filter(cls.external_id == external_id)
4602 4602 query = query.filter(cls.provider_name == provider_name)
4603 4603 query = query.filter(User.user_id == cls.local_user_id)
4604 4604 return query.first()
4605 4605
4606 4606 @classmethod
4607 4607 def by_local_user_id(cls, local_user_id):
4608 4608 """
4609 4609 Returns all external identities for the given user
4610 4610
4611 4611 :param local_user_id:
4612 4612 :return: ExternalIdentity
4613 4613 """
4614 4614 query = cls.query()
4615 4615 query = query.filter(cls.local_user_id == local_user_id)
4616 4616 return query
4617 4617
4618 4618 @classmethod
4619 4619 def load_provider_plugin(cls, plugin_id):
4620 4620 from rhodecode.authentication.base import loadplugin
4621 4621 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4622 4622 auth_plugin = loadplugin(_plugin_id)
4623 4623 return auth_plugin
4624 4624
4625 4625
4626 4626 class Integration(Base, BaseModel):
4627 4627 __tablename__ = 'integrations'
4628 4628 __table_args__ = (
4629 4629 base_table_args
4630 4630 )
4631 4631
4632 4632 integration_id = Column('integration_id', Integer(), primary_key=True)
4633 4633 integration_type = Column('integration_type', String(255))
4634 4634 enabled = Column('enabled', Boolean(), nullable=False)
4635 4635 name = Column('name', String(255), nullable=False)
4636 4636 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4637 4637 default=False)
4638 4638
4639 4639 settings = Column(
4640 4640 'settings_json', MutationObj.as_mutable(
4641 4641 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4642 4642 repo_id = Column(
4643 4643 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4644 4644 nullable=True, unique=None, default=None)
4645 4645 repo = relationship('Repository', lazy='joined')
4646 4646
4647 4647 repo_group_id = Column(
4648 4648 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4649 4649 nullable=True, unique=None, default=None)
4650 4650 repo_group = relationship('RepoGroup', lazy='joined')
4651 4651
4652 4652 @property
4653 4653 def scope(self):
4654 4654 if self.repo:
4655 4655 return repr(self.repo)
4656 4656 if self.repo_group:
4657 4657 if self.child_repos_only:
4658 4658 return repr(self.repo_group) + ' (child repos only)'
4659 4659 else:
4660 4660 return repr(self.repo_group) + ' (recursive)'
4661 4661 if self.child_repos_only:
4662 4662 return 'root_repos'
4663 4663 return 'global'
4664 4664
4665 4665 def __repr__(self):
4666 4666 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4667 4667
4668 4668
4669 4669 class RepoReviewRuleUser(Base, BaseModel):
4670 4670 __tablename__ = 'repo_review_rules_users'
4671 4671 __table_args__ = (
4672 4672 base_table_args
4673 4673 )
4674 4674
4675 4675 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4676 4676 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4677 4677 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4678 4678 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4679 4679 user = relationship('User')
4680 4680
4681 4681 def rule_data(self):
4682 4682 return {
4683 4683 'mandatory': self.mandatory
4684 4684 }
4685 4685
4686 4686
4687 4687 class RepoReviewRuleUserGroup(Base, BaseModel):
4688 4688 __tablename__ = 'repo_review_rules_users_groups'
4689 4689 __table_args__ = (
4690 4690 base_table_args
4691 4691 )
4692 4692
4693 4693 VOTE_RULE_ALL = -1
4694 4694
4695 4695 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4696 4696 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4697 4697 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4698 4698 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4699 4699 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4700 4700 users_group = relationship('UserGroup')
4701 4701
4702 4702 def rule_data(self):
4703 4703 return {
4704 4704 'mandatory': self.mandatory,
4705 4705 'vote_rule': self.vote_rule
4706 4706 }
4707 4707
4708 4708 @property
4709 4709 def vote_rule_label(self):
4710 4710 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4711 4711 return 'all must vote'
4712 4712 else:
4713 4713 return 'min. vote {}'.format(self.vote_rule)
4714 4714
4715 4715
4716 4716 class RepoReviewRule(Base, BaseModel):
4717 4717 __tablename__ = 'repo_review_rules'
4718 4718 __table_args__ = (
4719 4719 base_table_args
4720 4720 )
4721 4721
4722 4722 repo_review_rule_id = Column(
4723 4723 'repo_review_rule_id', Integer(), primary_key=True)
4724 4724 repo_id = Column(
4725 4725 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4726 4726 repo = relationship('Repository', backref='review_rules')
4727 4727
4728 4728 review_rule_name = Column('review_rule_name', String(255))
4729 4729 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4730 4730 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4731 4731 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4732 4732
4733 4733 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4734 4734 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4735 4735 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4736 4736 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4737 4737
4738 4738 rule_users = relationship('RepoReviewRuleUser')
4739 4739 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4740 4740
4741 4741 def _validate_pattern(self, value):
4742 4742 re.compile('^' + glob2re(value) + '$')
4743 4743
4744 4744 @hybrid_property
4745 4745 def source_branch_pattern(self):
4746 4746 return self._branch_pattern or '*'
4747 4747
4748 4748 @source_branch_pattern.setter
4749 4749 def source_branch_pattern(self, value):
4750 4750 self._validate_pattern(value)
4751 4751 self._branch_pattern = value or '*'
4752 4752
4753 4753 @hybrid_property
4754 4754 def target_branch_pattern(self):
4755 4755 return self._target_branch_pattern or '*'
4756 4756
4757 4757 @target_branch_pattern.setter
4758 4758 def target_branch_pattern(self, value):
4759 4759 self._validate_pattern(value)
4760 4760 self._target_branch_pattern = value or '*'
4761 4761
4762 4762 @hybrid_property
4763 4763 def file_pattern(self):
4764 4764 return self._file_pattern or '*'
4765 4765
4766 4766 @file_pattern.setter
4767 4767 def file_pattern(self, value):
4768 4768 self._validate_pattern(value)
4769 4769 self._file_pattern = value or '*'
4770 4770
4771 4771 def matches(self, source_branch, target_branch, files_changed):
4772 4772 """
4773 4773 Check if this review rule matches a branch/files in a pull request
4774 4774
4775 4775 :param source_branch: source branch name for the commit
4776 4776 :param target_branch: target branch name for the commit
4777 4777 :param files_changed: list of file paths changed in the pull request
4778 4778 """
4779 4779
4780 4780 source_branch = source_branch or ''
4781 4781 target_branch = target_branch or ''
4782 4782 files_changed = files_changed or []
4783 4783
4784 4784 branch_matches = True
4785 4785 if source_branch or target_branch:
4786 4786 if self.source_branch_pattern == '*':
4787 4787 source_branch_match = True
4788 4788 else:
4789 4789 if self.source_branch_pattern.startswith('re:'):
4790 4790 source_pattern = self.source_branch_pattern[3:]
4791 4791 else:
4792 4792 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4793 4793 source_branch_regex = re.compile(source_pattern)
4794 4794 source_branch_match = bool(source_branch_regex.search(source_branch))
4795 4795 if self.target_branch_pattern == '*':
4796 4796 target_branch_match = True
4797 4797 else:
4798 4798 if self.target_branch_pattern.startswith('re:'):
4799 4799 target_pattern = self.target_branch_pattern[3:]
4800 4800 else:
4801 4801 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4802 4802 target_branch_regex = re.compile(target_pattern)
4803 4803 target_branch_match = bool(target_branch_regex.search(target_branch))
4804 4804
4805 4805 branch_matches = source_branch_match and target_branch_match
4806 4806
4807 4807 files_matches = True
4808 4808 if self.file_pattern != '*':
4809 4809 files_matches = False
4810 4810 if self.file_pattern.startswith('re:'):
4811 4811 file_pattern = self.file_pattern[3:]
4812 4812 else:
4813 4813 file_pattern = glob2re(self.file_pattern)
4814 4814 file_regex = re.compile(file_pattern)
4815 4815 for filename in files_changed:
4816 4816 if file_regex.search(filename):
4817 4817 files_matches = True
4818 4818 break
4819 4819
4820 4820 return branch_matches and files_matches
4821 4821
4822 4822 @property
4823 4823 def review_users(self):
4824 4824 """ Returns the users which this rule applies to """
4825 4825
4826 4826 users = collections.OrderedDict()
4827 4827
4828 4828 for rule_user in self.rule_users:
4829 4829 if rule_user.user.active:
4830 4830 if rule_user.user not in users:
4831 4831 users[rule_user.user.username] = {
4832 4832 'user': rule_user.user,
4833 4833 'source': 'user',
4834 4834 'source_data': {},
4835 4835 'data': rule_user.rule_data()
4836 4836 }
4837 4837
4838 4838 for rule_user_group in self.rule_user_groups:
4839 4839 source_data = {
4840 4840 'user_group_id': rule_user_group.users_group.users_group_id,
4841 4841 'name': rule_user_group.users_group.users_group_name,
4842 4842 'members': len(rule_user_group.users_group.members)
4843 4843 }
4844 4844 for member in rule_user_group.users_group.members:
4845 4845 if member.user.active:
4846 4846 key = member.user.username
4847 4847 if key in users:
4848 4848 # skip this member as we already have them;
4849 4849 # this prevents overriding the "first" matched
4850 4850 # user with duplicates from multiple groups
4851 4851 continue
4852 4852
4853 4853 users[key] = {
4854 4854 'user': member.user,
4855 4855 'source': 'user_group',
4856 4856 'source_data': source_data,
4857 4857 'data': rule_user_group.rule_data()
4858 4858 }
4859 4859
4860 4860 return users
4861 4861
4862 4862 def user_group_vote_rule(self, user_id):
4863 4863
4864 4864 rules = []
4865 4865 if not self.rule_user_groups:
4866 4866 return rules
4867 4867
4868 4868 for user_group in self.rule_user_groups:
4869 4869 user_group_members = [x.user_id for x in user_group.users_group.members]
4870 4870 if user_id in user_group_members:
4871 4871 rules.append(user_group)
4872 4872 return rules
4873 4873
4874 4874 def __repr__(self):
4875 4875 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4876 4876 self.repo_review_rule_id, self.repo)
4877 4877
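# Note: a hypothetical sketch of evaluating the rule defined above; the
# patterns and branch names are illustrative. Plain values are treated as
# globs (translated via glob2re), while a 're:' prefix switches the pattern
# to a raw regular expression:
#
#   rule = RepoReviewRule()
#   rule.source_branch_pattern = 'feature/*'
#   rule.target_branch_pattern = 're:^(default|stable)$'
#   rule.file_pattern = '*.py'
#
#   rule.matches(source_branch='feature/login',
#                target_branch='default',
#                files_changed=['rhodecode/model/db.py'])
#
# The rule applies only when both branch patterns match and at least one
# changed file matches; with file_pattern='*' the file check is skipped.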
4878 4878
4879 4879 class ScheduleEntry(Base, BaseModel):
4880 4880 __tablename__ = 'schedule_entries'
4881 4881 __table_args__ = (
4882 4882 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4883 4883 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4884 4884 base_table_args,
4885 4885 )
4886 4886
4887 4887 schedule_types = ['crontab', 'timedelta', 'integer']
4888 4888 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4889 4889
4890 4890 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4891 4891 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4892 4892 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4893 4893
4894 4894 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4895 4895 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4896 4896
4897 4897 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4898 4898 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4899 4899
4900 4900 # task
4901 4901 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4902 4902 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4903 4903 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4904 4904 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4905 4905
4906 4906 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4907 4907 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4908 4908
4909 4909 @hybrid_property
4910 4910 def schedule_type(self):
4911 4911 return self._schedule_type
4912 4912
4913 4913 @schedule_type.setter
4914 4914 def schedule_type(self, val):
4915 4915 if val not in self.schedule_types:
4916 4916 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4917 4917 self.schedule_types, val))
4918 4918
4919 4919 self._schedule_type = val
4920 4920
4921 4921 @classmethod
4922 4922 def get_uid(cls, obj):
4923 4923 args = obj.task_args
4924 4924 kwargs = obj.task_kwargs
4925 4925 if isinstance(args, JsonRaw):
4926 4926 try:
4927 4927 args = json.loads(args)
4928 4928 except ValueError:
4929 4929 args = tuple()
4930 4930
4931 4931 if isinstance(kwargs, JsonRaw):
4932 4932 try:
4933 4933 kwargs = json.loads(kwargs)
4934 4934 except ValueError:
4935 4935 kwargs = dict()
4936 4936
4937 4937 dot_notation = obj.task_dot_notation
4938 4938 val = '.'.join(map(safe_str, [
4939 4939 sorted(dot_notation), args, sorted(kwargs.items())]))
4940 4940 return hashlib.sha1(val).hexdigest()
4941 4941
4942 4942 @classmethod
4943 4943 def get_by_schedule_name(cls, schedule_name):
4944 4944 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4945 4945
4946 4946 @classmethod
4947 4947 def get_by_schedule_id(cls, schedule_id):
4948 4948 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4949 4949
4950 4950 @property
4951 4951 def task(self):
4952 4952 return self.task_dot_notation
4953 4953
4954 4954 @property
4955 4955 def schedule(self):
4956 4956 from rhodecode.lib.celerylib.utils import raw_2_schedule
4957 4957 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4958 4958 return schedule
4959 4959
4960 4960 @property
4961 4961 def args(self):
4962 4962 try:
4963 4963 return list(self.task_args or [])
4964 4964 except ValueError:
4965 4965 return list()
4966 4966
4967 4967 @property
4968 4968 def kwargs(self):
4969 4969 try:
4970 4970 return dict(self.task_kwargs or {})
4971 4971 except ValueError:
4972 4972 return dict()
4973 4973
4974 4974 def _as_raw(self, val):
4975 4975 if hasattr(val, 'de_coerce'):
4976 4976 val = val.de_coerce()
4977 4977 if val:
4978 4978 val = json.dumps(val)
4979 4979
4980 4980 return val
4981 4981
4982 4982 @property
4983 4983 def schedule_definition_raw(self):
4984 4984 return self._as_raw(self.schedule_definition)
4985 4985
4986 4986 @property
4987 4987 def args_raw(self):
4988 4988 return self._as_raw(self.task_args)
4989 4989
4990 4990 @property
4991 4991 def kwargs_raw(self):
4992 4992 return self._as_raw(self.task_kwargs)
4993 4993
4994 4994 def __repr__(self):
4995 4995 return '<DB:ScheduleEntry({}:{})>'.format(
4996 4996 self.schedule_entry_id, self.schedule_name)
4997 4997
4998 4998
4999 4999 @event.listens_for(ScheduleEntry, 'before_update')
5000 5000 def update_task_uid(mapper, connection, target):
5001 5001 target.task_uid = ScheduleEntry.get_uid(target)
5002 5002
5003 5003
5004 5004 @event.listens_for(ScheduleEntry, 'before_insert')
5005 5005 def set_task_uid(mapper, connection, target):
5006 5006 target.task_uid = ScheduleEntry.get_uid(target)
5007 5007
5008 5008
5009 5009 class _BaseBranchPerms(BaseModel):
5010 5010 @classmethod
5011 5011 def compute_hash(cls, value):
5012 5012 return sha1_safe(value)
5013 5013
5014 5014 @hybrid_property
5015 5015 def branch_pattern(self):
5016 5016 return self._branch_pattern or '*'
5017 5017
5018 5018 @hybrid_property
5019 5019 def branch_hash(self):
5020 5020 return self._branch_hash
5021 5021
5022 5022 def _validate_glob(self, value):
5023 5023 re.compile('^' + glob2re(value) + '$')
5024 5024
5025 5025 @branch_pattern.setter
5026 5026 def branch_pattern(self, value):
5027 5027 self._validate_glob(value)
5028 5028 self._branch_pattern = value or '*'
5029 5029 # set the Hash when setting the branch pattern
5030 5030 self._branch_hash = self.compute_hash(self._branch_pattern)
5031 5031
5032 5032 def matches(self, branch):
5033 5033 """
5034 5034 Check if the given branch matches this entry
5035 5035
5036 5036 :param branch: branch name for the commit
5037 5037 """
5038 5038
5039 5039 branch = branch or ''
5040 5040
5041 5041 branch_matches = True
5042 5042 if branch:
5043 5043 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5044 5044 branch_matches = bool(branch_regex.search(branch))
5045 5045
5046 5046 return branch_matches
5047 5047
5048 5048
5049 5049 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5050 5050 __tablename__ = 'user_to_repo_branch_permissions'
5051 5051 __table_args__ = (
5052 5052 base_table_args
5053 5053 )
5054 5054
5055 5055 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5056 5056
5057 5057 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5058 5058 repo = relationship('Repository', backref='user_branch_perms')
5059 5059
5060 5060 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5061 5061 permission = relationship('Permission')
5062 5062
5063 5063 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5064 5064 user_repo_to_perm = relationship('UserRepoToPerm')
5065 5065
5066 5066 rule_order = Column('rule_order', Integer(), nullable=False)
5067 5067 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5068 5068 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5069 5069
5070 5070 def __unicode__(self):
5071 5071 return u'<UserBranchPermission(%s => %r)>' % (
5072 5072 self.user_repo_to_perm, self.branch_pattern)
5073 5073
5074 5074
5075 5075 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5076 5076 __tablename__ = 'user_group_to_repo_branch_permissions'
5077 5077 __table_args__ = (
5078 5078 base_table_args
5079 5079 )
5080 5080
5081 5081 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5082 5082
5083 5083 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5084 5084 repo = relationship('Repository', backref='user_group_branch_perms')
5085 5085
5086 5086 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5087 5087 permission = relationship('Permission')
5088 5088
5089 5089 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5090 5090 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5091 5091
5092 5092 rule_order = Column('rule_order', Integer(), nullable=False)
5093 5093 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5094 5094 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5095 5095
5096 5096 def __unicode__(self):
5097 5097 return u'<UserGroupBranchPermission(%s => %r)>' % (
5098 5098 self.user_group_repo_to_perm, self.branch_pattern)
5099 5099
5100 5100
5101 5101 class UserBookmark(Base, BaseModel):
5102 5102 __tablename__ = 'user_bookmarks'
5103 5103 __table_args__ = (
5104 5104 UniqueConstraint('user_id', 'bookmark_repo_id'),
5105 5105 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5106 5106 UniqueConstraint('user_id', 'bookmark_position'),
5107 5107 base_table_args
5108 5108 )
5109 5109
5110 5110 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5111 5111 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5112 5112 position = Column("bookmark_position", Integer(), nullable=False)
5113 5113 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5114 5114 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5115 5115 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5116 5116
5117 5117 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5118 5118 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5119 5119
5120 5120 user = relationship("User")
5121 5121
5122 5122 repository = relationship("Repository")
5123 5123 repository_group = relationship("RepoGroup")
5124 5124
5125 5125 @classmethod
5126 5126 def get_by_position_for_user(cls, position, user_id):
5127 5127 return cls.query() \
5128 5128 .filter(UserBookmark.user_id == user_id) \
5129 5129 .filter(UserBookmark.position == position).scalar()
5130 5130
5131 5131 @classmethod
5132 5132 def get_bookmarks_for_user(cls, user_id):
5133 5133 return cls.query() \
5134 5134 .filter(UserBookmark.user_id == user_id) \
5135 5135 .options(joinedload(UserBookmark.repository)) \
5136 5136 .options(joinedload(UserBookmark.repository_group)) \
5137 5137 .order_by(UserBookmark.position.asc()) \
5138 5138 .all()
5139 5139
5140 5140 def __unicode__(self):
5141 5141 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5142 5142
5143 5143
5144 5144 class FileStore(Base, BaseModel):
5145 5145 __tablename__ = 'file_store'
5146 5146 __table_args__ = (
5147 5147 base_table_args
5148 5148 )
5149 5149
5150 5150 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5151 5151 file_uid = Column('file_uid', String(1024), nullable=False)
5152 5152 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5153 5153 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5154 5154 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5155 5155
5156 5156 # sha256 hash
5157 5157 file_hash = Column('file_hash', String(512), nullable=False)
5158 5158 file_size = Column('file_size', BigInteger(), nullable=False)
5159 5159
5160 5160 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5161 5161 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5162 5162 accessed_count = Column('accessed_count', Integer(), default=0)
5163 5163
5164 5164 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5165 5165
5166 5166 # if repo/repo_group reference is set, check for permissions
5167 5167 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5168 5168
5169 5169 # hidden marks an attachment that should not be shown in the artifact listing
5170 5170 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5171 5171
5172 5172 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5173 5173 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5174 5174
5175 5175 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5176 5176
5177 5177 # scope limited to a user the requester has access to
5178 5178 scope_user_id = Column(
5179 5179 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5180 5180 nullable=True, unique=None, default=None)
5181 5181 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5182 5182
5183 5183 # scope limited to a user group the requester has access to
5184 5184 scope_user_group_id = Column(
5185 5185 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5186 5186 nullable=True, unique=None, default=None)
5187 5187 user_group = relationship('UserGroup', lazy='joined')
5188 5188
5189 5189 # scope limited to a repo the requester has access to
5190 5190 scope_repo_id = Column(
5191 5191 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5192 5192 nullable=True, unique=None, default=None)
5193 5193 repo = relationship('Repository', lazy='joined')
5194 5194
5195 5195 # scope limited to a repo group the requester has access to
5196 5196 scope_repo_group_id = Column(
5197 5197 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5198 5198 nullable=True, unique=None, default=None)
5199 5199 repo_group = relationship('RepoGroup', lazy='joined')
5200 5200
5201 5201 @classmethod
5202 5202 def get_by_store_uid(cls, file_store_uid):
5203 5203 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5204 5204
5205 5205 @classmethod
5206 5206 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5207 5207 file_description='', enabled=True, hidden=False, check_acl=True,
5208 5208 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5209 5209
5210 5210 store_entry = FileStore()
5211 5211 store_entry.file_uid = file_uid
5212 5212 store_entry.file_display_name = file_display_name
5213 5213 store_entry.file_org_name = filename
5214 5214 store_entry.file_size = file_size
5215 5215 store_entry.file_hash = file_hash
5216 5216 store_entry.file_description = file_description
5217 5217
5218 5218 store_entry.check_acl = check_acl
5219 5219 store_entry.enabled = enabled
5220 5220 store_entry.hidden = hidden
5221 5221
5222 5222 store_entry.user_id = user_id
5223 5223 store_entry.scope_user_id = scope_user_id
5224 5224 store_entry.scope_repo_id = scope_repo_id
5225 5225 store_entry.scope_repo_group_id = scope_repo_group_id
5226 5226
5227 5227 return store_entry
5228 5228
5229 5229 @classmethod
5230 5230 def store_metadata(cls, file_store_id, args, commit=True):
5231 5231 file_store = FileStore.get(file_store_id)
5232 5232 if file_store is None:
5233 5233 return
5234 5234
5235 5235 for section, key, value, value_type in args:
5236 5236 has_key = FileStoreMetadata().query() \
5237 5237 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5238 5238 .filter(FileStoreMetadata.file_store_meta_section == section) \
5239 5239 .filter(FileStoreMetadata.file_store_meta_key == key) \
5240 5240 .scalar()
5241 5241 if has_key:
5242 5242 msg = 'key `{}` already defined under section `{}` for this file.'\
5243 5243 .format(key, section)
5244 5244 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5245 5245
5246 5246 # NOTE(marcink): raises ArtifactMetadataBadValueType
5247 5247 FileStoreMetadata.valid_value_type(value_type)
5248 5248
5249 5249 meta_entry = FileStoreMetadata()
5250 5250 meta_entry.file_store = file_store
5251 5251 meta_entry.file_store_meta_section = section
5252 5252 meta_entry.file_store_meta_key = key
5253 5253 meta_entry.file_store_meta_value_type = value_type
5254 5254 meta_entry.file_store_meta_value = value
5255 5255
5256 5256 Session().add(meta_entry)
5257 5257
5258 5258 try:
5259 5259 if commit:
5260 5260 Session().commit()
5261 5261 except IntegrityError:
5262 5262 Session().rollback()
5263 5263 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5264 5264
5265 5265 @classmethod
5266 5266 def bump_access_counter(cls, file_uid, commit=True):
5267 5267 FileStore().query()\
5268 5268 .filter(FileStore.file_uid == file_uid)\
5269 5269 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5270 5270 FileStore.accessed_on: datetime.datetime.now()})
5271 5271 if commit:
5272 5272 Session().commit()
5273 5273
5274 5274 def __json__(self):
5275 5275 data = {
5276 5276 'filename': self.file_display_name,
5277 5277 'filename_org': self.file_org_name,
5278 5278 'file_uid': self.file_uid,
5279 5279 'description': self.file_description,
5280 5280 'hidden': self.hidden,
5281 5281 'size': self.file_size,
5282 5282 'created_on': self.created_on,
5283 5283 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5284 5284 'downloaded_times': self.accessed_count,
5285 5285 'sha256': self.file_hash,
5286 5286 'metadata': self.file_metadata,
5287 5287 }
5288 5288
5289 5289 return data
5290 5290
5291 5291 def __repr__(self):
5292 5292 return '<FileStore({})>'.format(self.file_store_id)
5293 5293
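# Note: a minimal, hypothetical sketch of attaching metadata to a stored
# artifact via FileStore.store_metadata above; the uid and values are
# illustrative only:
#
#   store = FileStore.get_by_store_uid('some-artifact-uid')
#   FileStore.store_metadata(
#       store.file_store_id,
#       [('report', 'coverage', '87', 'int')],
#       commit=True)
#
# Each tuple is (section, key, value, value_type); value_type must resolve to
# one of FileStoreMetadata.SETTINGS_TYPES, and re-using an existing
# section/key pair for the same file raises ArtifactMetadataDuplicate.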
5294 5294
5295 5295 class FileStoreMetadata(Base, BaseModel):
5296 5296 __tablename__ = 'file_store_metadata'
5297 5297 __table_args__ = (
5298 5298 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5299 5299 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5300 5300 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5301 5301 base_table_args
5302 5302 )
5303 5303 SETTINGS_TYPES = {
5304 5304 'str': safe_str,
5305 5305 'int': safe_int,
5306 5306 'unicode': safe_unicode,
5307 5307 'bool': str2bool,
5308 5308 'list': functools.partial(aslist, sep=',')
5309 5309 }
5310 5310
5311 5311 file_store_meta_id = Column(
5312 5312 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5313 5313 primary_key=True)
5314 5314 _file_store_meta_section = Column(
5315 5315 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5316 5316 nullable=True, unique=None, default=None)
5317 5317 _file_store_meta_section_hash = Column(
5318 5318 "file_store_meta_section_hash", String(255),
5319 5319 nullable=True, unique=None, default=None)
5320 5320 _file_store_meta_key = Column(
5321 5321 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5322 5322 nullable=True, unique=None, default=None)
5323 5323 _file_store_meta_key_hash = Column(
5324 5324 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5325 5325 _file_store_meta_value = Column(
5326 5326 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5327 5327 nullable=True, unique=None, default=None)
5328 5328 _file_store_meta_value_type = Column(
5329 5329 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5330 5330 default='unicode')
5331 5331
5332 5332 file_store_id = Column(
5333 5333 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5334 5334 nullable=True, unique=None, default=None)
5335 5335
5336 5336 file_store = relationship('FileStore', lazy='joined')
5337 5337
5338 5338 @classmethod
5339 5339 def valid_value_type(cls, value):
5340 5340 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5341 5341 raise ArtifactMetadataBadValueType(
5342 5342 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5343 5343
5344 5344 @hybrid_property
5345 5345 def file_store_meta_section(self):
5346 5346 return self._file_store_meta_section
5347 5347
5348 5348 @file_store_meta_section.setter
5349 5349 def file_store_meta_section(self, value):
5350 5350 self._file_store_meta_section = value
5351 5351 self._file_store_meta_section_hash = _hash_key(value)
5352 5352
5353 5353 @hybrid_property
5354 5354 def file_store_meta_key(self):
5355 5355 return self._file_store_meta_key
5356 5356
5357 5357 @file_store_meta_key.setter
5358 5358 def file_store_meta_key(self, value):
5359 5359 self._file_store_meta_key = value
5360 5360 self._file_store_meta_key_hash = _hash_key(value)
5361 5361
5362 5362 @hybrid_property
5363 5363 def file_store_meta_value(self):
5364 5364 val = self._file_store_meta_value
5365 5365
5366 5366 if self._file_store_meta_value_type:
5367 5367 # e.g unicode.encrypted == unicode
5368 5368 _type = self._file_store_meta_value_type.split('.')[0]
5369 5369 # decode the value if it's an encrypted field type
5370 5370 if '.encrypted' in self._file_store_meta_value_type:
5371 5371 cipher = EncryptedTextValue()
5372 5372 val = safe_unicode(cipher.process_result_value(val, None))
5373 5373 # do final type conversion
5374 5374 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5375 5375 val = converter(val)
5376 5376
5377 5377 return val
5378 5378
5379 5379 @file_store_meta_value.setter
5380 5380 def file_store_meta_value(self, val):
5381 5381 val = safe_unicode(val)
5382 5382 # encode the encrypted value
5383 5383 if '.encrypted' in self.file_store_meta_value_type:
5384 5384 cipher = EncryptedTextValue()
5385 5385 val = safe_unicode(cipher.process_bind_param(val, None))
5386 5386 self._file_store_meta_value = val
5387 5387
5388 5388 @hybrid_property
5389 5389 def file_store_meta_value_type(self):
5390 5390 return self._file_store_meta_value_type
5391 5391
5392 5392 @file_store_meta_value_type.setter
5393 5393 def file_store_meta_value_type(self, val):
5394 5394 # e.g unicode.encrypted
5395 5395 self.valid_value_type(val)
5396 5396 self._file_store_meta_value_type = val
5397 5397
5398 5398 def __json__(self):
5399 5399 data = {
5400 5400 'artifact': self.file_store.file_uid,
5401 5401 'section': self.file_store_meta_section,
5402 5402 'key': self.file_store_meta_key,
5403 5403 'value': self.file_store_meta_value,
5404 5404 }
5405 5405
5406 5406 return data
5407 5407
5408 5408 def __repr__(self):
5409 5409 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
5410 5410 self.file_store_meta_key, self.file_store_meta_value)
5411 5411
5412 5412
5413 5413 class DbMigrateVersion(Base, BaseModel):
5414 5414 __tablename__ = 'db_migrate_version'
5415 5415 __table_args__ = (
5416 5416 base_table_args,
5417 5417 )
5418 5418
5419 5419 repository_id = Column('repository_id', String(250), primary_key=True)
5420 5420 repository_path = Column('repository_path', Text)
5421 5421 version = Column('version', Integer)
5422 5422
5423 5423 @classmethod
5424 5424 def set_version(cls, version):
5425 5425 """
5426 5426 Helper for forcing a different version, usually for debugging purposes via ishell.
5427 5427 """
5428 5428 ver = DbMigrateVersion.query().first()
5429 5429 ver.version = version
5430 5430 Session().commit()
5431 5431
5432 5432
5433 5433 class DbSession(Base, BaseModel):
5434 5434 __tablename__ = 'db_session'
5435 5435 __table_args__ = (
5436 5436 base_table_args,
5437 5437 )
5438 5438
5439 5439 def __repr__(self):
5440 5440 return '<DB:DbSession({})>'.format(self.id)
5441 5441
5442 5442 id = Column('id', Integer())
5443 5443 namespace = Column('namespace', String(255), primary_key=True)
5444 5444 accessed = Column('accessed', DateTime, nullable=False)
5445 5445 created = Column('created', DateTime, nullable=False)
5446 5446 data = Column('data', PickleType, nullable=False)
@@ -1,1744 +1,1759 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 opened_by=None, order_by=None,
140 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
141 statuses=None, opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=False):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 if search_q:
150 like_expression = u'%{}%'.format(safe_unicode(search_q))
151 q = q.filter(or_(
152 cast(PullRequest.pull_request_id, String).ilike(like_expression),
153 PullRequest.title.ilike(like_expression),
154 PullRequest.description.ilike(like_expression),
155 ))
156
149 157 # source or target
150 158 if repo and source:
151 159 q = q.filter(PullRequest.source_repo == repo)
152 160 elif repo:
153 161 q = q.filter(PullRequest.target_repo == repo)
154 162
155 163 # closed,opened
156 164 if statuses:
157 165 q = q.filter(PullRequest.status.in_(statuses))
158 166
159 167 # opened by filter
160 168 if opened_by:
161 169 q = q.filter(PullRequest.user_id.in_(opened_by))
162 170
163 171 # only get those that are in "created" state
164 172 if only_created:
165 173 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166 174
167 175 if order_by:
168 176 order_map = {
169 177 'name_raw': PullRequest.pull_request_id,
170 178 'id': PullRequest.pull_request_id,
171 179 'title': PullRequest.title,
172 180 'updated_on_raw': PullRequest.updated_on,
173 181 'target_repo': PullRequest.target_repo_id
174 182 }
175 183 if order_dir == 'asc':
176 184 q = q.order_by(order_map[order_by].asc())
177 185 else:
178 186 q = q.order_by(order_map[order_by].desc())
179 187
180 188 return q
181 189
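Because the pattern is wrapped in %...% and applied with ilike, the new search_q quick filter is a case-insensitive substring match against the pull request id, title and description. A small standalone sketch of the matching rule it implements (pure Python, for illustration only; the helper name is invented):

def quick_filter_matches(search_q, pr_id, title, description):
    # mirrors the ILIKE '%search_q%' behaviour of the query above
    needle = search_q.lower()
    haystacks = [str(pr_id), (title or u'').lower(), (description or u'').lower()]
    return any(needle in hay for hay in haystacks)

assert quick_filter_matches('42', 142, u'Fix login', u'')        # matches the id
assert quick_filter_matches('LOGIN', 7, u'Fix login form', u'')  # case-insensitive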
182 def count_all(self, repo_name, source=False, statuses=None,
190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
183 191 opened_by=None):
184 192 """
185 193 Count the number of pull requests for a specific repository.
186 194
187 195 :param repo_name: target or source repo
196 :param search_q: filter by text
188 197 :param source: boolean flag to specify if repo_name refers to source
189 198 :param statuses: list of pull request statuses
190 199 :param opened_by: author user of the pull request
191 200 :returns: int number of pull requests
192 201 """
193 202 q = self._prepare_get_all_query(
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
203 repo_name, search_q=search_q, source=source, statuses=statuses,
204 opened_by=opened_by)
195 205
196 206 return q.count()
197 207
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 offset=0, length=None, order_by=None, order_dir='desc'):
208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
200 210 """
201 211 Get all pull requests for a specific repository.
202 212
203 213 :param repo_name: target or source repo
214 :param search_q: filter by text
204 215 :param source: boolean flag to specify if repo_name refers to source
205 216 :param statuses: list of pull request statuses
206 217 :param opened_by: author user of the pull request
207 218 :param offset: pagination offset
208 219 :param length: length of returned list
209 220 :param order_by: order of the returned list
210 221 :param order_dir: 'asc' or 'desc' ordering direction
211 222 :returns: list of pull requests
212 223 """
213 224 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 order_by=order_by, order_dir=order_dir)
225 repo_name, search_q=search_q, source=source, statuses=statuses,
226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
216 227
217 228 if length:
218 229 pull_requests = q.limit(length).offset(offset).all()
219 230 else:
220 231 pull_requests = q.all()
221 232
222 233 return pull_requests
223 234
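A hedged usage sketch of the extended listing API with the new search_q parameter; it assumes an initialized RhodeCode environment, and 'some-repo' and the filter text are placeholders.

# First page of pull requests whose id, title or description contains 'login'.
model = PullRequestModel()
total = model.count_all('some-repo', search_q='login')
prs = model.get_all(
    'some-repo', search_q='login',
    offset=0, length=20, order_by='updated_on_raw', order_dir='desc')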
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
225 236 opened_by=None):
226 237 """
227 238 Count the number of pull requests for a specific repository that are
228 239 awaiting review.
229 240
230 241 :param repo_name: target or source repo
242 :param search_q: filter by text
231 243 :param source: boolean flag to specify if repo_name refers to source
232 244 :param statuses: list of pull request statuses
233 245 :param opened_by: author user of the pull request
234 246 :returns: int number of pull requests
235 247 """
236 248 pull_requests = self.get_awaiting_review(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
238 250
239 251 return len(pull_requests)
240 252
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
253 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
242 254 opened_by=None, offset=0, length=None,
243 255 order_by=None, order_dir='desc'):
244 256 """
245 257 Get all pull requests for a specific repository that are awaiting
246 258 review.
247 259
248 260 :param repo_name: target or source repo
261 :param search_q: filter by text
249 262 :param source: boolean flag to specify if repo_name refers to source
250 263 :param statuses: list of pull request statuses
251 264 :param opened_by: author user of the pull request
252 265 :param offset: pagination offset
253 266 :param length: length of returned list
254 267 :param order_by: order of the returned list
255 268 :param order_dir: 'asc' or 'desc' ordering direction
256 269 :returns: list of pull requests
257 270 """
258 271 pull_requests = self.get_all(
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 order_by=order_by, order_dir=order_dir)
272 repo_name, search_q=search_q, source=source, statuses=statuses,
273 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
261 274
262 275 _filtered_pull_requests = []
263 276 for pr in pull_requests:
264 277 status = pr.calculated_review_status()
265 278 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 279 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 280 _filtered_pull_requests.append(pr)
268 281 if length:
269 282 return _filtered_pull_requests[offset:offset+length]
270 283 else:
271 284 return _filtered_pull_requests
272 285
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
286 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
274 287 opened_by=None, user_id=None):
275 288 """
276 289 Count the number of pull requests for a specific repository that are
277 290 awaiting review from a specific user.
278 291
279 292 :param repo_name: target or source repo
293 :param search_q: filter by text
280 294 :param source: boolean flag to specify if repo_name refers to source
281 295 :param statuses: list of pull request statuses
282 296 :param opened_by: author user of the pull request
283 297 :param user_id: reviewer user of the pull request
284 298 :returns: int number of pull requests
285 299 """
286 300 pull_requests = self.get_awaiting_my_review(
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 user_id=user_id)
301 repo_name, search_q=search_q, source=source, statuses=statuses,
302 opened_by=opened_by, user_id=user_id)
289 303
290 304 return len(pull_requests)
291 305
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
306 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
293 307 opened_by=None, user_id=None, offset=0,
294 308 length=None, order_by=None, order_dir='desc'):
295 309 """
296 310 Get all pull requests for a specific repository that are awaiting
297 311 review from a specific user.
298 312
299 313 :param repo_name: target or source repo
314 :param search_q: filter by text
300 315 :param source: boolean flag to specify if repo_name refers to source
301 316 :param statuses: list of pull request statuses
302 317 :param opened_by: author user of the pull request
303 318 :param user_id: reviewer user of the pull request
304 319 :param offset: pagination offset
305 320 :param length: length of returned list
306 321 :param order_by: order of the returned list
307 322 :param order_dir: 'asc' or 'desc' ordering direction
308 323 :returns: list of pull requests
309 324 """
310 325 pull_requests = self.get_all(
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 order_by=order_by, order_dir=order_dir)
326 repo_name, search_q=search_q, source=source, statuses=statuses,
327 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
313 328
314 329 _my = PullRequestModel().get_not_reviewed(user_id)
315 330 my_participation = []
316 331 for pr in pull_requests:
317 332 if pr in _my:
318 333 my_participation.append(pr)
319 334 _filtered_pull_requests = my_participation
320 335 if length:
321 336 return _filtered_pull_requests[offset:offset+length]
322 337 else:
323 338 return _filtered_pull_requests
324 339
325 340 def get_not_reviewed(self, user_id):
326 341 return [
327 342 x.pull_request for x in PullRequestReviewers.query().filter(
328 343 PullRequestReviewers.user_id == user_id).all()
329 344 ]
330 345
331 346 def _prepare_participating_query(self, user_id=None, statuses=None,
332 347 order_by=None, order_dir='desc'):
333 348 q = PullRequest.query()
334 349 if user_id:
335 350 reviewers_subquery = Session().query(
336 351 PullRequestReviewers.pull_request_id).filter(
337 352 PullRequestReviewers.user_id == user_id).subquery()
338 353 user_filter = or_(
339 354 PullRequest.user_id == user_id,
340 355 PullRequest.pull_request_id.in_(reviewers_subquery)
341 356 )
342 357 q = PullRequest.query().filter(user_filter)
343 358
344 359 # closed,opened
345 360 if statuses:
346 361 q = q.filter(PullRequest.status.in_(statuses))
347 362
348 363 if order_by:
349 364 order_map = {
350 365 'name_raw': PullRequest.pull_request_id,
351 366 'title': PullRequest.title,
352 367 'updated_on_raw': PullRequest.updated_on,
353 368 'target_repo': PullRequest.target_repo_id
354 369 }
355 370 if order_dir == 'asc':
356 371 q = q.order_by(order_map[order_by].asc())
357 372 else:
358 373 q = q.order_by(order_map[order_by].desc())
359 374
360 375 return q
361 376
362 377 def count_im_participating_in(self, user_id=None, statuses=None):
363 378 q = self._prepare_participating_query(user_id, statuses=statuses)
364 379 return q.count()
365 380
366 381 def get_im_participating_in(
367 382 self, user_id=None, statuses=None, offset=0,
368 383 length=None, order_by=None, order_dir='desc'):
369 384 """
370 385 Get all pull requests that I'm participating in, or that I have opened
371 386 """
372 387
373 388 q = self._prepare_participating_query(
374 389 user_id, statuses=statuses, order_by=order_by,
375 390 order_dir=order_dir)
376 391
377 392 if length:
378 393 pull_requests = q.limit(length).offset(offset).all()
379 394 else:
380 395 pull_requests = q.all()
381 396
382 397 return pull_requests
383 398
384 399 def get_versions(self, pull_request):
385 400 """
386 401 returns versions of the pull request sorted by ID ascending
387 402 """
388 403 return PullRequestVersion.query()\
389 404 .filter(PullRequestVersion.pull_request == pull_request)\
390 405 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 406 .all()
392 407
393 408 def get_pr_version(self, pull_request_id, version=None):
394 409 at_version = None
395 410
396 411 if version and version == 'latest':
397 412 pull_request_ver = PullRequest.get(pull_request_id)
398 413 pull_request_obj = pull_request_ver
399 414 _org_pull_request_obj = pull_request_obj
400 415 at_version = 'latest'
401 416 elif version:
402 417 pull_request_ver = PullRequestVersion.get_or_404(version)
403 418 pull_request_obj = pull_request_ver
404 419 _org_pull_request_obj = pull_request_ver.pull_request
405 420 at_version = pull_request_ver.pull_request_version_id
406 421 else:
407 422 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 423 pull_request_id)
409 424
410 425 pull_request_display_obj = PullRequest.get_pr_display_object(
411 426 pull_request_obj, _org_pull_request_obj)
412 427
413 428 return _org_pull_request_obj, pull_request_obj, \
414 429 pull_request_display_obj, at_version
415 430
416 431 def create(self, created_by, source_repo, source_ref, target_repo,
417 432 target_ref, revisions, reviewers, title, description=None,
418 433 description_renderer=None,
419 434 reviewer_data=None, translator=None, auth_user=None):
420 435 translator = translator or get_current_request().translate
421 436
422 437 created_by_user = self._get_user(created_by)
423 438 auth_user = auth_user or created_by_user.AuthUser()
424 439 source_repo = self._get_repo(source_repo)
425 440 target_repo = self._get_repo(target_repo)
426 441
427 442 pull_request = PullRequest()
428 443 pull_request.source_repo = source_repo
429 444 pull_request.source_ref = source_ref
430 445 pull_request.target_repo = target_repo
431 446 pull_request.target_ref = target_ref
432 447 pull_request.revisions = revisions
433 448 pull_request.title = title
434 449 pull_request.description = description
435 450 pull_request.description_renderer = description_renderer
436 451 pull_request.author = created_by_user
437 452 pull_request.reviewer_data = reviewer_data
438 453 pull_request.pull_request_state = pull_request.STATE_CREATING
439 454 Session().add(pull_request)
440 455 Session().flush()
441 456
442 457 reviewer_ids = set()
443 458 # members / reviewers
444 459 for reviewer_object in reviewers:
445 460 user_id, reasons, mandatory, rules = reviewer_object
446 461 user = self._get_user(user_id)
447 462
448 463 # skip duplicates
449 464 if user.user_id in reviewer_ids:
450 465 continue
451 466
452 467 reviewer_ids.add(user.user_id)
453 468
454 469 reviewer = PullRequestReviewers()
455 470 reviewer.user = user
456 471 reviewer.pull_request = pull_request
457 472 reviewer.reasons = reasons
458 473 reviewer.mandatory = mandatory
459 474
460 475 # NOTE(marcink): pick only first rule for now
461 476 rule_id = list(rules)[0] if rules else None
462 477 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 478 if rule:
464 479 review_group = rule.user_group_vote_rule(user_id)
465 480 # we check if this particular reviewer is a member of a voting group
466 481 if review_group:
467 482 # NOTE(marcink):
468 483 # the user can be a member of more than one group, but we pick the first,
469 484 # the same as the default reviewers algorithm does
470 485 review_group = review_group[0]
471 486
472 487 rule_data = {
473 488 'rule_name':
474 489 rule.review_rule_name,
475 490 'rule_user_group_entry_id':
476 491 review_group.repo_review_rule_users_group_id,
477 492 'rule_user_group_name':
478 493 review_group.users_group.users_group_name,
479 494 'rule_user_group_members':
480 495 [x.user.username for x in review_group.users_group.members],
481 496 'rule_user_group_members_id':
482 497 [x.user.user_id for x in review_group.users_group.members],
483 498 }
484 499 # e.g {'vote_rule': -1, 'mandatory': True}
485 500 rule_data.update(review_group.rule_data())
486 501
487 502 reviewer.rule_data = rule_data
488 503
489 504 Session().add(reviewer)
490 505 Session().flush()
491 506
492 507 # Set approval status to "Under Review" for all commits which are
493 508 # part of this pull request.
494 509 ChangesetStatusModel().set_status(
495 510 repo=target_repo,
496 511 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 512 user=created_by_user,
498 513 pull_request=pull_request
499 514 )
500 515 # we commit early at this point. This is needed because the queries
501 516 # above do some row-locking, so we have to commit and finish the
502 517 # transaction before the validate call below, which for large repos
503 518 # could take long and result in long row locks
504 519 Session().commit()
505 520
506 521 # prepare workspace, and run initial merge simulation. Set state during that
507 522 # operation
508 523 pull_request = PullRequest.get(pull_request.pull_request_id)
509 524
510 525 # set state to merging for the merge simulation, and if it finishes set it
511 526 # to created, to mark that the simulation works fine
512 527 with pull_request.set_state(PullRequest.STATE_MERGING,
513 528 final_state=PullRequest.STATE_CREATED) as state_obj:
514 529 MergeCheck.validate(
515 530 pull_request, auth_user=auth_user, translator=translator)
516 531
517 532 self.notify_reviewers(pull_request, reviewer_ids)
518 533 self.trigger_pull_request_hook(
519 534 pull_request, created_by_user, 'create')
520 535
521 536 creation_data = pull_request.get_api_data(with_merge_state=False)
522 537 self._log_audit_action(
523 538 'repo.pull_request.create', {'data': creation_data},
524 539 auth_user, pull_request)
525 540
526 541 return pull_request
527 542
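A hedged sketch of a create() call; every identifier, ref and commit id below is an invented placeholder, and the call assumes an initialized RhodeCode environment.

# Refs use the 'type:name:commit_id' format; reviewers are the
# (user_id, reasons, mandatory, rules) tuples unpacked above.
pr = PullRequestModel().create(
    created_by=2,
    source_repo='group/source-repo',
    source_ref='branch:feature-x:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
    target_repo='group/target-repo',
    target_ref='branch:default:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
    revisions=['aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'],
    reviewers=[(4, [u'added manually'], False, [])],
    title=u'feature-x to default',
    description=u'placeholder description')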
528 543 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 544 pull_request = self.__get_pull_request(pull_request)
530 545 target_scm = pull_request.target_repo.scm_instance()
531 546 if action == 'create':
532 547 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 548 elif action == 'merge':
534 549 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 550 elif action == 'close':
536 551 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 552 elif action == 'review_status_change':
538 553 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 554 elif action == 'update':
540 555 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 556 elif action == 'comment':
542 557 # dummy hook for the 'comment' action. We want this function to handle all cases
543 558 def trigger_hook(*args, **kwargs):
544 559 pass
545 560 comment = data['comment']
546 561 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 562 else:
548 563 return
549 564
550 565 trigger_hook(
551 566 username=user.username,
552 567 repo_name=pull_request.target_repo.repo_name,
553 568 repo_alias=target_scm.alias,
554 569 pull_request=pull_request,
555 570 data=data)
556 571
557 572 def _get_commit_ids(self, pull_request):
558 573 """
559 574 Return the commit ids of the merged pull request.
560 575
561 576 This method does not yet deal correctly with the lack of autoupdates
562 577 nor with implicit target updates.
563 578 For example: if a commit in the source repo is already in the target, it
564 579 will be reported anyway.
565 580 """
566 581 merge_rev = pull_request.merge_rev
567 582 if merge_rev is None:
568 583 raise ValueError('This pull request was not merged yet')
569 584
570 585 commit_ids = list(pull_request.revisions)
571 586 if merge_rev not in commit_ids:
572 587 commit_ids.append(merge_rev)
573 588
574 589 return commit_ids
575 590
576 591 def merge_repo(self, pull_request, user, extras):
577 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 593 extras['user_agent'] = 'internal-merge'
579 594 merge_state = self._merge_pull_request(pull_request, user, extras)
580 595 if merge_state.executed:
581 596 log.debug("Merge was successful, updating the pull request comments.")
582 597 self._comment_and_close_pr(pull_request, user, merge_state)
583 598
584 599 self._log_audit_action(
585 600 'repo.pull_request.merge',
586 601 {'merge_state': merge_state.__dict__},
587 602 user, pull_request)
588 603
589 604 else:
590 605 log.warn("Merge failed, not updating the pull request.")
591 606 return merge_state
592 607
593 608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 609 target_vcs = pull_request.target_repo.scm_instance()
595 610 source_vcs = pull_request.source_repo.scm_instance()
596 611
597 612 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 613 pr_id=pull_request.pull_request_id,
599 614 pr_title=pull_request.title,
600 615 source_repo=source_vcs.name,
601 616 source_ref_name=pull_request.source_ref_parts.name,
602 617 target_repo=target_vcs.name,
603 618 target_ref_name=pull_request.target_ref_parts.name,
604 619 )
605 620
606 621 workspace_id = self._workspace_id(pull_request)
607 622 repo_id = pull_request.target_repo.repo_id
608 623 use_rebase = self._use_rebase_for_merging(pull_request)
609 624 close_branch = self._close_branch_before_merging(pull_request)
610 625
611 626 target_ref = self._refresh_reference(
612 627 pull_request.target_ref_parts, target_vcs)
613 628
614 629 callback_daemon, extras = prepare_callback_daemon(
615 630 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 631 host=vcs_settings.HOOKS_HOST,
617 632 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618 633
619 634 with callback_daemon:
620 635 # TODO: johbo: Implement a clean way to run a config_override
621 636 # for a single call.
622 637 target_vcs.config.set(
623 638 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624 639
625 640 user_name = user.short_contact
626 641 merge_state = target_vcs.merge(
627 642 repo_id, workspace_id, target_ref, source_vcs,
628 643 pull_request.source_ref_parts,
629 644 user_name=user_name, user_email=user.email,
630 645 message=message, use_rebase=use_rebase,
631 646 close_branch=close_branch)
632 647 return merge_state
633 648
634 649 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 650 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 651 pull_request.updated_on = datetime.datetime.now()
637 652 close_msg = close_msg or 'Pull request merged and closed'
638 653
639 654 CommentsModel().create(
640 655 text=safe_unicode(close_msg),
641 656 repo=pull_request.target_repo.repo_id,
642 657 user=user.user_id,
643 658 pull_request=pull_request.pull_request_id,
644 659 f_path=None,
645 660 line_no=None,
646 661 closing_pr=True
647 662 )
648 663
649 664 Session().add(pull_request)
650 665 Session().flush()
651 666 # TODO: paris: replace invalidation with a less radical solution
652 667 ScmModel().mark_for_invalidation(
653 668 pull_request.target_repo.repo_name)
654 669 self.trigger_pull_request_hook(pull_request, user, 'merge')
655 670
656 671 def has_valid_update_type(self, pull_request):
657 672 source_ref_type = pull_request.source_ref_parts.type
658 673 return source_ref_type in self.REF_TYPES
659 674
660 675 def update_commits(self, pull_request):
661 676 """
662 677 Get the updated list of commits for the pull request
663 678 and return the new pull request version and the list
664 679 of commits processed by this update action
665 680 """
666 681 pull_request = self.__get_pull_request(pull_request)
667 682 source_ref_type = pull_request.source_ref_parts.type
668 683 source_ref_name = pull_request.source_ref_parts.name
669 684 source_ref_id = pull_request.source_ref_parts.commit_id
670 685
671 686 target_ref_type = pull_request.target_ref_parts.type
672 687 target_ref_name = pull_request.target_ref_parts.name
673 688 target_ref_id = pull_request.target_ref_parts.commit_id
674 689
675 690 if not self.has_valid_update_type(pull_request):
676 691 log.debug("Skipping update of pull request %s due to ref type: %s",
677 692 pull_request, source_ref_type)
678 693 return UpdateResponse(
679 694 executed=False,
680 695 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 696 old=pull_request, new=None, changes=None,
682 697 source_changed=False, target_changed=False)
683 698
684 699 # source repo
685 700 source_repo = pull_request.source_repo.scm_instance()
686 701
687 702 try:
688 703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
689 704 except CommitDoesNotExistError:
690 705 return UpdateResponse(
691 706 executed=False,
692 707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
693 708 old=pull_request, new=None, changes=None,
694 709 source_changed=False, target_changed=False)
695 710
696 711 source_changed = source_ref_id != source_commit.raw_id
697 712
698 713 # target repo
699 714 target_repo = pull_request.target_repo.scm_instance()
700 715
701 716 try:
702 717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
703 718 except CommitDoesNotExistError:
704 719 return UpdateResponse(
705 720 executed=False,
706 721 reason=UpdateFailureReason.MISSING_TARGET_REF,
707 722 old=pull_request, new=None, changes=None,
708 723 source_changed=False, target_changed=False)
709 724 target_changed = target_ref_id != target_commit.raw_id
710 725
711 726 if not (source_changed or target_changed):
712 727 log.debug("Nothing changed in pull request %s", pull_request)
713 728 return UpdateResponse(
714 729 executed=False,
715 730 reason=UpdateFailureReason.NO_CHANGE,
716 731 old=pull_request, new=None, changes=None,
717 732 source_changed=source_changed, target_changed=target_changed)
718 733
719 734 change_in_found = 'target repo' if target_changed else 'source repo'
720 735 log.debug('Updating pull request because of change in %s detected',
721 736 change_in_found)
722 737
723 738 # Finally an update is needed; in case of a source change we create
724 739 # a new version, otherwise just an update
725 740 if source_changed:
726 741 pull_request_version = self._create_version_from_snapshot(pull_request)
727 742 self._link_comments_to_version(pull_request_version)
728 743 else:
729 744 try:
730 745 ver = pull_request.versions[-1]
731 746 except IndexError:
732 747 ver = None
733 748
734 749 pull_request.pull_request_version_id = \
735 750 ver.pull_request_version_id if ver else None
736 751 pull_request_version = pull_request
737 752
738 753 try:
739 754 if target_ref_type in self.REF_TYPES:
740 755 target_commit = target_repo.get_commit(target_ref_name)
741 756 else:
742 757 target_commit = target_repo.get_commit(target_ref_id)
743 758 except CommitDoesNotExistError:
744 759 return UpdateResponse(
745 760 executed=False,
746 761 reason=UpdateFailureReason.MISSING_TARGET_REF,
747 762 old=pull_request, new=None, changes=None,
748 763 source_changed=source_changed, target_changed=target_changed)
749 764
750 765 # re-compute commit ids
751 766 old_commit_ids = pull_request.revisions
752 767 pre_load = ["author", "date", "message", "branch"]
753 768 commit_ranges = target_repo.compare(
754 769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
755 770 pre_load=pre_load)
756 771
757 772 ancestor = source_repo.get_common_ancestor(
758 773 source_commit.raw_id, target_commit.raw_id, target_repo)
759 774
760 775 pull_request.source_ref = '%s:%s:%s' % (
761 776 source_ref_type, source_ref_name, source_commit.raw_id)
762 777 pull_request.target_ref = '%s:%s:%s' % (
763 778 target_ref_type, target_ref_name, ancestor)
764 779
765 780 pull_request.revisions = [
766 781 commit.raw_id for commit in reversed(commit_ranges)]
767 782 pull_request.updated_on = datetime.datetime.now()
768 783 Session().add(pull_request)
769 784 new_commit_ids = pull_request.revisions
770 785
771 786 old_diff_data, new_diff_data = self._generate_update_diffs(
772 787 pull_request, pull_request_version)
773 788
774 789 # calculate commit and file changes
775 790 changes = self._calculate_commit_id_changes(
776 791 old_commit_ids, new_commit_ids)
777 792 file_changes = self._calculate_file_changes(
778 793 old_diff_data, new_diff_data)
779 794
780 795 # set comments as outdated if DIFFS changed
781 796 CommentsModel().outdate_comments(
782 797 pull_request, old_diff_data=old_diff_data,
783 798 new_diff_data=new_diff_data)
784 799
785 800 commit_changes = (changes.added or changes.removed)
786 801 file_node_changes = (
787 802 file_changes.added or file_changes.modified or file_changes.removed)
788 803 pr_has_changes = commit_changes or file_node_changes
789 804
790 805 # Add an automatic comment to the pull request, in case
791 806 # anything has changed
792 807 if pr_has_changes:
793 808 update_comment = CommentsModel().create(
794 809 text=self._render_update_message(changes, file_changes),
795 810 repo=pull_request.target_repo,
796 811 user=pull_request.author,
797 812 pull_request=pull_request,
798 813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
799 814
800 815 # Update status to "Under Review" for added commits
801 816 for commit_id in changes.added:
802 817 ChangesetStatusModel().set_status(
803 818 repo=pull_request.source_repo,
804 819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
805 820 comment=update_comment,
806 821 user=pull_request.author,
807 822 pull_request=pull_request,
808 823 revision=commit_id)
809 824
810 825 log.debug(
811 826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
812 827 'removed_ids: %s', pull_request.pull_request_id,
813 828 changes.added, changes.common, changes.removed)
814 829 log.debug(
815 830 'Updated pull request with the following file changes: %s',
816 831 file_changes)
817 832
818 833 log.info(
819 834 "Updated pull request %s from commit %s to commit %s, "
820 835 "stored new version %s of this pull request.",
821 836 pull_request.pull_request_id, source_ref_id,
822 837 pull_request.source_ref_parts.commit_id,
823 838 pull_request_version.pull_request_version_id)
824 839 Session().commit()
825 840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
826 841
827 842 return UpdateResponse(
828 843 executed=True, reason=UpdateFailureReason.NONE,
829 844 old=pull_request, new=pull_request_version, changes=changes,
830 845 source_changed=source_changed, target_changed=target_changed)
831 846
832 847 def _create_version_from_snapshot(self, pull_request):
833 848 version = PullRequestVersion()
834 849 version.title = pull_request.title
835 850 version.description = pull_request.description
836 851 version.status = pull_request.status
837 852 version.pull_request_state = pull_request.pull_request_state
838 853 version.created_on = datetime.datetime.now()
839 854 version.updated_on = pull_request.updated_on
840 855 version.user_id = pull_request.user_id
841 856 version.source_repo = pull_request.source_repo
842 857 version.source_ref = pull_request.source_ref
843 858 version.target_repo = pull_request.target_repo
844 859 version.target_ref = pull_request.target_ref
845 860
846 861 version._last_merge_source_rev = pull_request._last_merge_source_rev
847 862 version._last_merge_target_rev = pull_request._last_merge_target_rev
848 863 version.last_merge_status = pull_request.last_merge_status
849 864 version.shadow_merge_ref = pull_request.shadow_merge_ref
850 865 version.merge_rev = pull_request.merge_rev
851 866 version.reviewer_data = pull_request.reviewer_data
852 867
853 868 version.revisions = pull_request.revisions
854 869 version.pull_request = pull_request
855 870 Session().add(version)
856 871 Session().flush()
857 872
858 873 return version
859 874
860 875 def _generate_update_diffs(self, pull_request, pull_request_version):
861 876
862 877 diff_context = (
863 878 self.DIFF_CONTEXT +
864 879 CommentsModel.needed_extra_diff_context())
865 880 hide_whitespace_changes = False
866 881 source_repo = pull_request_version.source_repo
867 882 source_ref_id = pull_request_version.source_ref_parts.commit_id
868 883 target_ref_id = pull_request_version.target_ref_parts.commit_id
869 884 old_diff = self._get_diff_from_pr_or_version(
870 885 source_repo, source_ref_id, target_ref_id,
871 886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
872 887
873 888 source_repo = pull_request.source_repo
874 889 source_ref_id = pull_request.source_ref_parts.commit_id
875 890 target_ref_id = pull_request.target_ref_parts.commit_id
876 891
877 892 new_diff = self._get_diff_from_pr_or_version(
878 893 source_repo, source_ref_id, target_ref_id,
879 894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
880 895
881 896 old_diff_data = diffs.DiffProcessor(old_diff)
882 897 old_diff_data.prepare()
883 898 new_diff_data = diffs.DiffProcessor(new_diff)
884 899 new_diff_data.prepare()
885 900
886 901 return old_diff_data, new_diff_data
887 902
888 903 def _link_comments_to_version(self, pull_request_version):
889 904 """
890 905 Link all unlinked comments of this pull request to the given version.
891 906
892 907 :param pull_request_version: The `PullRequestVersion` to which
893 908 the comments shall be linked.
894 909
895 910 """
896 911 pull_request = pull_request_version.pull_request
897 912 comments = ChangesetComment.query()\
898 913 .filter(
899 914 # TODO: johbo: Should we query for the repo at all here?
900 915 # Pending decision on how comments of PRs are to be related
901 916 # to either the source repo, the target repo or no repo at all.
902 917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
903 918 ChangesetComment.pull_request == pull_request,
904 919 ChangesetComment.pull_request_version == None)\
905 920 .order_by(ChangesetComment.comment_id.asc())
906 921
907 922 # TODO: johbo: Find out why this breaks if it is done in a bulk
908 923 # operation.
909 924 for comment in comments:
910 925 comment.pull_request_version_id = (
911 926 pull_request_version.pull_request_version_id)
912 927 Session().add(comment)
913 928
914 929 def _calculate_commit_id_changes(self, old_ids, new_ids):
915 930 added = [x for x in new_ids if x not in old_ids]
916 931 common = [x for x in new_ids if x in old_ids]
917 932 removed = [x for x in old_ids if x not in new_ids]
918 933 total = new_ids
919 934 return ChangeTuple(added, common, removed, total)
920 935
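A small worked example of the list comprehensions above (standalone, for illustration):

old_ids = ['a', 'b', 'c']
new_ids = ['b', 'c', 'd']
added   = [x for x in new_ids if x not in old_ids]   # ['d']
common  = [x for x in new_ids if x in old_ids]       # ['b', 'c']
removed = [x for x in old_ids if x not in new_ids]   # ['a']
total   = new_ids                                    # ['b', 'c', 'd']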
921 936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
922 937
923 938 old_files = OrderedDict()
924 939 for diff_data in old_diff_data.parsed_diff:
925 940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
926 941
927 942 added_files = []
928 943 modified_files = []
929 944 removed_files = []
930 945 for diff_data in new_diff_data.parsed_diff:
931 946 new_filename = diff_data['filename']
932 947 new_hash = md5_safe(diff_data['raw_diff'])
933 948
934 949 old_hash = old_files.get(new_filename)
935 950 if not old_hash:
936 951 # file is not present in old diff, which means it's added
937 952 added_files.append(new_filename)
938 953 else:
939 954 if new_hash != old_hash:
940 955 modified_files.append(new_filename)
941 956 # now remove a file from old, since we have seen it already
942 957 del old_files[new_filename]
943 958
944 959 # removed files are those present in old, but not in NEW;
945 960 # since we remove old files that are present in the new diff, any
946 961 # left-overs should be the removed files
947 962 removed_files.extend(old_files.keys())
948 963
949 964 return FileChangeTuple(added_files, modified_files, removed_files)
950 965
951 966 def _render_update_message(self, changes, file_changes):
952 967 """
953 968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
954 969 so it always looks the same regardless of which default
955 970 renderer the system is using.
956 971
957 972 :param changes: changes named tuple
958 973 :param file_changes: file changes named tuple
959 974
960 975 """
961 976 new_status = ChangesetStatus.get_status_lbl(
962 977 ChangesetStatus.STATUS_UNDER_REVIEW)
963 978
964 979 changed_files = (
965 980 file_changes.added + file_changes.modified + file_changes.removed)
966 981
967 982 params = {
968 983 'under_review_label': new_status,
969 984 'added_commits': changes.added,
970 985 'removed_commits': changes.removed,
971 986 'changed_files': changed_files,
972 987 'added_files': file_changes.added,
973 988 'modified_files': file_changes.modified,
974 989 'removed_files': file_changes.removed,
975 990 }
976 991 renderer = RstTemplateRenderer()
977 992 return renderer.render('pull_request_update.mako', **params)
978 993
979 994 def edit(self, pull_request, title, description, description_renderer, user):
980 995 pull_request = self.__get_pull_request(pull_request)
981 996 old_data = pull_request.get_api_data(with_merge_state=False)
982 997 if pull_request.is_closed():
983 998 raise ValueError('This pull request is closed')
984 999 if title:
985 1000 pull_request.title = title
986 1001 pull_request.description = description
987 1002 pull_request.updated_on = datetime.datetime.now()
988 1003 pull_request.description_renderer = description_renderer
989 1004 Session().add(pull_request)
990 1005 self._log_audit_action(
991 1006 'repo.pull_request.edit', {'old_data': old_data},
992 1007 user, pull_request)
993 1008
994 1009 def update_reviewers(self, pull_request, reviewer_data, user):
995 1010 """
996 1011 Update the reviewers in the pull request
997 1012
998 1013 :param pull_request: the pr to update
999 1014 :param reviewer_data: list of tuples
1000 1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1001 1016 """
1002 1017 pull_request = self.__get_pull_request(pull_request)
1003 1018 if pull_request.is_closed():
1004 1019 raise ValueError('This pull request is closed')
1005 1020
1006 1021 reviewers = {}
1007 1022 for user_id, reasons, mandatory, rules in reviewer_data:
1008 1023 if isinstance(user_id, (int, compat.string_types)):
1009 1024 user_id = self._get_user(user_id).user_id
1010 1025 reviewers[user_id] = {
1011 1026 'reasons': reasons, 'mandatory': mandatory}
1012 1027
1013 1028 reviewers_ids = set(reviewers.keys())
1014 1029 current_reviewers = PullRequestReviewers.query()\
1015 1030 .filter(PullRequestReviewers.pull_request ==
1016 1031 pull_request).all()
1017 1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1018 1033
1019 1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1020 1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1021 1036
1022 1037 log.debug("Adding %s reviewers", ids_to_add)
1023 1038 log.debug("Removing %s reviewers", ids_to_remove)
1024 1039 changed = False
1025 1040 added_audit_reviewers = []
1026 1041 removed_audit_reviewers = []
1027 1042
1028 1043 for uid in ids_to_add:
1029 1044 changed = True
1030 1045 _usr = self._get_user(uid)
1031 1046 reviewer = PullRequestReviewers()
1032 1047 reviewer.user = _usr
1033 1048 reviewer.pull_request = pull_request
1034 1049 reviewer.reasons = reviewers[uid]['reasons']
1035 1050 # NOTE(marcink): mandatory shouldn't be changed now
1036 1051 # reviewer.mandatory = reviewers[uid]['reasons']
1037 1052 Session().add(reviewer)
1038 1053 added_audit_reviewers.append(reviewer.get_dict())
1039 1054
1040 1055 for uid in ids_to_remove:
1041 1056 changed = True
1042 1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge-case
1043 1058 # fix for situations where the same reviewer was added twice;
1044 1059 # this CAN happen due to the lack of DB checks
1045 1060 reviewers = PullRequestReviewers.query()\
1046 1061 .filter(PullRequestReviewers.user_id == uid,
1047 1062 PullRequestReviewers.pull_request == pull_request)\
1048 1063 .all()
1049 1064
1050 1065 for obj in reviewers:
1051 1066 removed_audit_reviewers.append(obj.get_dict())
1052 1067 Session().delete(obj)
1053 1068
1054 1069 if changed:
1055 1070 Session().expire_all()
1056 1071 pull_request.updated_on = datetime.datetime.now()
1057 1072 Session().add(pull_request)
1058 1073
1059 1074 # finally store audit logs
1060 1075 for user_data in added_audit_reviewers:
1061 1076 self._log_audit_action(
1062 1077 'repo.pull_request.reviewer.add', {'data': user_data},
1063 1078 user, pull_request)
1064 1079 for user_data in removed_audit_reviewers:
1065 1080 self._log_audit_action(
1066 1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1067 1082 user, pull_request)
1068 1083
1069 1084 self.notify_reviewers(pull_request, ids_to_add)
1070 1085 return ids_to_add, ids_to_remove
1071 1086
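A hedged sketch of the reviewer_data structure accepted by update_reviewers(): user id or username, a list of human-readable reasons, the mandatory flag and a list of rule ids. Here pull_request and cur_user are placeholders for an existing pull request and the acting user.

reviewer_data = [
    (4, [u'added manually'], False, []),         # by user id
    (u'jane', [u'default reviewer'], True, []),  # by username
]
ids_added, ids_removed = PullRequestModel().update_reviewers(
    pull_request, reviewer_data, user=cur_user)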
1072 1087 def get_url(self, pull_request, request=None, permalink=False):
1073 1088 if not request:
1074 1089 request = get_current_request()
1075 1090
1076 1091 if permalink:
1077 1092 return request.route_url(
1078 1093 'pull_requests_global',
1079 1094 pull_request_id=pull_request.pull_request_id,)
1080 1095 else:
1081 1096 return request.route_url('pullrequest_show',
1082 1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1083 1098 pull_request_id=pull_request.pull_request_id,)
1084 1099
1085 1100 def get_shadow_clone_url(self, pull_request, request=None):
1086 1101 """
1087 1102 Returns a qualified URL pointing to the shadow repository. If this pull
1088 1103 request is closed, there is no shadow repository and ``None`` will be
1089 1104 returned.
1090 1105 """
1091 1106 if pull_request.is_closed():
1092 1107 return None
1093 1108 else:
1094 1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1095 1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1096 1111
1097 1112 def notify_reviewers(self, pull_request, reviewers_ids):
1098 1113 # notification to reviewers
1099 1114 if not reviewers_ids:
1100 1115 return
1101 1116
1102 1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1103 1118
1104 1119 pull_request_obj = pull_request
1105 1120 # get the current participants of this pull request
1106 1121 recipients = reviewers_ids
1107 1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1108 1123
1109 1124 pr_source_repo = pull_request_obj.source_repo
1110 1125 pr_target_repo = pull_request_obj.target_repo
1111 1126
1112 1127 pr_url = h.route_url('pullrequest_show',
1113 1128 repo_name=pr_target_repo.repo_name,
1114 1129 pull_request_id=pull_request_obj.pull_request_id,)
1115 1130
1116 1131 # set some variables for email notification
1117 1132 pr_target_repo_url = h.route_url(
1118 1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1119 1134
1120 1135 pr_source_repo_url = h.route_url(
1121 1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1122 1137
1123 1138 # pull request specifics
1124 1139 pull_request_commits = [
1125 1140 (x.raw_id, x.message)
1126 1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1127 1142
1128 1143 kwargs = {
1129 1144 'user': pull_request.author,
1130 1145 'pull_request': pull_request_obj,
1131 1146 'pull_request_commits': pull_request_commits,
1132 1147
1133 1148 'pull_request_target_repo': pr_target_repo,
1134 1149 'pull_request_target_repo_url': pr_target_repo_url,
1135 1150
1136 1151 'pull_request_source_repo': pr_source_repo,
1137 1152 'pull_request_source_repo_url': pr_source_repo_url,
1138 1153
1139 1154 'pull_request_url': pr_url,
1140 1155 }
1141 1156
1142 1157 # pre-generate the subject for notification itself
1143 1158 (subject,
1144 1159 _h, _e, # we don't care about those
1145 1160 body_plaintext) = EmailNotificationModel().render_email(
1146 1161 notification_type, **kwargs)
1147 1162
1148 1163 # create notification objects, and emails
1149 1164 NotificationModel().create(
1150 1165 created_by=pull_request.author,
1151 1166 notification_subject=subject,
1152 1167 notification_body=body_plaintext,
1153 1168 notification_type=notification_type,
1154 1169 recipients=recipients,
1155 1170 email_kwargs=kwargs,
1156 1171 )
1157 1172
1158 1173 def delete(self, pull_request, user):
1159 1174 pull_request = self.__get_pull_request(pull_request)
1160 1175 old_data = pull_request.get_api_data(with_merge_state=False)
1161 1176 self._cleanup_merge_workspace(pull_request)
1162 1177 self._log_audit_action(
1163 1178 'repo.pull_request.delete', {'old_data': old_data},
1164 1179 user, pull_request)
1165 1180 Session().delete(pull_request)
1166 1181
1167 1182 def close_pull_request(self, pull_request, user):
1168 1183 pull_request = self.__get_pull_request(pull_request)
1169 1184 self._cleanup_merge_workspace(pull_request)
1170 1185 pull_request.status = PullRequest.STATUS_CLOSED
1171 1186 pull_request.updated_on = datetime.datetime.now()
1172 1187 Session().add(pull_request)
1173 1188 self.trigger_pull_request_hook(
1174 1189 pull_request, pull_request.author, 'close')
1175 1190
1176 1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1177 1192 self._log_audit_action(
1178 1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1179 1194
1180 1195 def close_pull_request_with_comment(
1181 1196 self, pull_request, user, repo, message=None, auth_user=None):
1182 1197
1183 1198 pull_request_review_status = pull_request.calculated_review_status()
1184 1199
1185 1200 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1186 1201 # approved only if we have voting consent
1187 1202 status = ChangesetStatus.STATUS_APPROVED
1188 1203 else:
1189 1204 status = ChangesetStatus.STATUS_REJECTED
1190 1205 status_lbl = ChangesetStatus.get_status_lbl(status)
1191 1206
1192 1207 default_message = (
1193 1208 'Closing with status change {transition_icon} {status}.'
1194 1209 ).format(transition_icon='>', status=status_lbl)
1195 1210 text = message or default_message
1196 1211
1197 1212 # create a comment, and link it to new status
1198 1213 comment = CommentsModel().create(
1199 1214 text=text,
1200 1215 repo=repo.repo_id,
1201 1216 user=user.user_id,
1202 1217 pull_request=pull_request.pull_request_id,
1203 1218 status_change=status_lbl,
1204 1219 status_change_type=status,
1205 1220 closing_pr=True,
1206 1221 auth_user=auth_user,
1207 1222 )
1208 1223
1209 1224 # calculate old status before we change it
1210 1225 old_calculated_status = pull_request.calculated_review_status()
1211 1226 ChangesetStatusModel().set_status(
1212 1227 repo.repo_id,
1213 1228 status,
1214 1229 user.user_id,
1215 1230 comment=comment,
1216 1231 pull_request=pull_request.pull_request_id
1217 1232 )
1218 1233
1219 1234 Session().flush()
1220 1235 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1221 1236 # we now calculate the status of the pull request again, and based on that
1222 1237 # calculation trigger a status change. This might happen in cases
1223 1238 # where a non-reviewer admin closes a pr, which means their vote doesn't
1224 1239 # change the status, while if they're a reviewer this might change it.
1225 1240 calculated_status = pull_request.calculated_review_status()
1226 1241 if old_calculated_status != calculated_status:
1227 1242 self.trigger_pull_request_hook(
1228 1243 pull_request, user, 'review_status_change',
1229 1244 data={'status': calculated_status})
1230 1245
1231 1246 # finally close the PR
1232 1247 PullRequestModel().close_pull_request(
1233 1248 pull_request.pull_request_id, user)
1234 1249
1235 1250 return comment, status
1236 1251
1237 1252 def merge_status(self, pull_request, translator=None,
1238 1253 force_shadow_repo_refresh=False):
1239 1254 _ = translator or get_current_request().translate
1240 1255
1241 1256 if not self._is_merge_enabled(pull_request):
1242 1257 return False, _('Server-side pull request merging is disabled.')
1243 1258 if pull_request.is_closed():
1244 1259 return False, _('This pull request is closed.')
1245 1260 merge_possible, msg = self._check_repo_requirements(
1246 1261 target=pull_request.target_repo, source=pull_request.source_repo,
1247 1262 translator=_)
1248 1263 if not merge_possible:
1249 1264 return merge_possible, msg
1250 1265
1251 1266 try:
1252 1267 resp = self._try_merge(
1253 1268 pull_request,
1254 1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1255 1270 log.debug("Merge response: %s", resp)
1256 1271 status = resp.possible, resp.merge_status_message
1257 1272 except NotImplementedError:
1258 1273 status = False, _('Pull request merging is not supported.')
1259 1274
1260 1275 return status
1261 1276
1262 1277 def _check_repo_requirements(self, target, source, translator):
1263 1278 """
1264 1279 Check if `target` and `source` have compatible requirements.
1265 1280
1266 1281 Currently this is just checking for largefiles.
1267 1282 """
1268 1283 _ = translator
1269 1284 target_has_largefiles = self._has_largefiles(target)
1270 1285 source_has_largefiles = self._has_largefiles(source)
1271 1286 merge_possible = True
1272 1287 message = u''
1273 1288
1274 1289 if target_has_largefiles != source_has_largefiles:
1275 1290 merge_possible = False
1276 1291 if source_has_largefiles:
1277 1292 message = _(
1278 1293 'Target repository large files support is disabled.')
1279 1294 else:
1280 1295 message = _(
1281 1296 'Source repository large files support is disabled.')
1282 1297
1283 1298 return merge_possible, message
1284 1299
1285 1300 def _has_largefiles(self, repo):
1286 1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1287 1302 'extensions', 'largefiles')
1288 1303 return largefiles_ui and largefiles_ui[0].active
1289 1304
1290 1305 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1291 1306 """
1292 1307 Try to merge the pull request and return the merge status.
1293 1308 """
1294 1309 log.debug(
1295 1310 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1296 1311 pull_request.pull_request_id, force_shadow_repo_refresh)
1297 1312 target_vcs = pull_request.target_repo.scm_instance()
1298 1313 # Refresh the target reference.
1299 1314 try:
1300 1315 target_ref = self._refresh_reference(
1301 1316 pull_request.target_ref_parts, target_vcs)
1302 1317 except CommitDoesNotExistError:
1303 1318 merge_state = MergeResponse(
1304 1319 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1305 1320 metadata={'target_ref': pull_request.target_ref_parts})
1306 1321 return merge_state
1307 1322
1308 1323 target_locked = pull_request.target_repo.locked
1309 1324 if target_locked and target_locked[0]:
1310 1325 locked_by = 'user:{}'.format(target_locked[0])
1311 1326 log.debug("The target repository is locked by %s.", locked_by)
1312 1327 merge_state = MergeResponse(
1313 1328 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1314 1329 metadata={'locked_by': locked_by})
1315 1330 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1316 1331 pull_request, target_ref):
1317 1332 log.debug("Refreshing the merge status of the repository.")
1318 1333 merge_state = self._refresh_merge_state(
1319 1334 pull_request, target_vcs, target_ref)
1320 1335 else:
1321 1336 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1322 1337 metadata = {
1323 1338 'target_ref': pull_request.target_ref_parts,
1324 1339 'source_ref': pull_request.source_ref_parts,
1325 1340 }
1326 1341 if not possible and target_ref.type == 'branch':
1327 1342 # NOTE(marcink): case for mercurial multiple heads on branch
1328 1343 heads = target_vcs._heads(target_ref.name)
1329 1344 if len(heads) != 1:
1330 1345 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1331 1346 metadata.update({
1332 1347 'heads': heads
1333 1348 })
1334 1349 merge_state = MergeResponse(
1335 1350 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1336 1351
1337 1352 return merge_state
1338 1353
1339 1354 def _refresh_reference(self, reference, vcs_repository):
1340 1355 if reference.type in self.UPDATABLE_REF_TYPES:
1341 1356 name_or_id = reference.name
1342 1357 else:
1343 1358 name_or_id = reference.commit_id
1344 1359
1345 1360 refreshed_commit = vcs_repository.get_commit(name_or_id)
1346 1361 refreshed_reference = Reference(
1347 1362 reference.type, reference.name, refreshed_commit.raw_id)
1348 1363 return refreshed_reference
1349 1364
1350 1365 def _needs_merge_state_refresh(self, pull_request, target_reference):
1351 1366 return not(
1352 1367 pull_request.revisions and
1353 1368 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1354 1369 target_reference.commit_id == pull_request._last_merge_target_rev)
1355 1370
1356 1371 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1357 1372 workspace_id = self._workspace_id(pull_request)
1358 1373 source_vcs = pull_request.source_repo.scm_instance()
1359 1374 repo_id = pull_request.target_repo.repo_id
1360 1375 use_rebase = self._use_rebase_for_merging(pull_request)
1361 1376 close_branch = self._close_branch_before_merging(pull_request)
1362 1377 merge_state = target_vcs.merge(
1363 1378 repo_id, workspace_id,
1364 1379 target_reference, source_vcs, pull_request.source_ref_parts,
1365 1380 dry_run=True, use_rebase=use_rebase,
1366 1381 close_branch=close_branch)
1367 1382
1368 1383 # Do not store the response if there was an unknown error.
1369 1384 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1370 1385 pull_request._last_merge_source_rev = \
1371 1386 pull_request.source_ref_parts.commit_id
1372 1387 pull_request._last_merge_target_rev = target_reference.commit_id
1373 1388 pull_request.last_merge_status = merge_state.failure_reason
1374 1389 pull_request.shadow_merge_ref = merge_state.merge_ref
1375 1390 Session().add(pull_request)
1376 1391 Session().commit()
1377 1392
1378 1393 return merge_state
1379 1394
1380 1395 def _workspace_id(self, pull_request):
1381 1396 workspace_id = 'pr-%s' % pull_request.pull_request_id
1382 1397 return workspace_id
1383 1398
1384 1399 def generate_repo_data(self, repo, commit_id=None, branch=None,
1385 1400 bookmark=None, translator=None):
1386 1401 from rhodecode.model.repo import RepoModel
1387 1402
1388 1403 all_refs, selected_ref = \
1389 1404 self._get_repo_pullrequest_sources(
1390 1405 repo.scm_instance(), commit_id=commit_id,
1391 1406 branch=branch, bookmark=bookmark, translator=translator)
1392 1407
1393 1408 refs_select2 = []
1394 1409 for element in all_refs:
1395 1410 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1396 1411 refs_select2.append({'text': element[1], 'children': children})
1397 1412
1398 1413 return {
1399 1414 'user': {
1400 1415 'user_id': repo.user.user_id,
1401 1416 'username': repo.user.username,
1402 1417 'firstname': repo.user.first_name,
1403 1418 'lastname': repo.user.last_name,
1404 1419 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1405 1420 },
1406 1421 'name': repo.repo_name,
1407 1422 'link': RepoModel().get_url(repo),
1408 1423 'description': h.chop_at_smart(repo.description_safe, '\n'),
1409 1424 'refs': {
1410 1425 'all_refs': all_refs,
1411 1426 'selected_ref': selected_ref,
1412 1427 'select2_refs': refs_select2
1413 1428 }
1414 1429 }
1415 1430
1416 1431 def generate_pullrequest_title(self, source, source_ref, target):
1417 1432 return u'{source}#{at_ref} to {target}'.format(
1418 1433 source=source,
1419 1434 at_ref=source_ref,
1420 1435 target=target,
1421 1436 )
1422 1437
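For example, the format string above yields titles like the following (repo and branch names are placeholders):

title = u'{source}#{at_ref} to {target}'.format(
    source='group/source-repo', at_ref='feature-x', target='group/target-repo')
assert title == u'group/source-repo#feature-x to group/target-repo'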
1423 1438 def _cleanup_merge_workspace(self, pull_request):
1424 1439 # Merging related cleanup
1425 1440 repo_id = pull_request.target_repo.repo_id
1426 1441 target_scm = pull_request.target_repo.scm_instance()
1427 1442 workspace_id = self._workspace_id(pull_request)
1428 1443
1429 1444 try:
1430 1445 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1431 1446 except NotImplementedError:
1432 1447 pass
1433 1448
1434 1449 def _get_repo_pullrequest_sources(
1435 1450 self, repo, commit_id=None, branch=None, bookmark=None,
1436 1451 translator=None):
1437 1452 """
1438 1453 Return a structure with the repo's interesting commits, suitable for
1439 1454 the selectors in the pullrequest controller
1440 1455
1441 1456 :param commit_id: a commit that must be in the list somehow
1442 1457 and selected by default
1443 1458 :param branch: a branch that must be in the list and selected
1444 1459 by default - even if closed
1445 1460 :param bookmark: a bookmark that must be in the list and selected
1446 1461 """
1447 1462 _ = translator or get_current_request().translate
1448 1463
1449 1464 commit_id = safe_str(commit_id) if commit_id else None
1450 1465 branch = safe_unicode(branch) if branch else None
1451 1466 bookmark = safe_unicode(bookmark) if bookmark else None
1452 1467
1453 1468 selected = None
1454 1469
1455 1470 # order matters: first source that has commit_id in it will be selected
1456 1471 sources = []
1457 1472 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1458 1473 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1459 1474
1460 1475 if commit_id:
1461 1476 ref_commit = (h.short_id(commit_id), commit_id)
1462 1477 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1463 1478
1464 1479 sources.append(
1465 1480 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1466 1481 )
1467 1482
1468 1483 groups = []
1469 1484
1470 1485 for group_key, ref_list, group_name, match in sources:
1471 1486 group_refs = []
1472 1487 for ref_name, ref_id in ref_list:
1473 1488 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1474 1489 group_refs.append((ref_key, ref_name))
1475 1490
1476 1491 if not selected:
1477 1492 if set([commit_id, match]) & set([ref_id, ref_name]):
1478 1493 selected = ref_key
1479 1494
1480 1495 if group_refs:
1481 1496 groups.append((group_refs, group_name))
1482 1497
1483 1498 if not selected:
1484 1499 ref = commit_id or branch or bookmark
1485 1500 if ref:
1486 1501 raise CommitDoesNotExistError(
1487 1502 u'No commit refs could be found matching: {}'.format(ref))
1488 1503 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1489 1504 selected = u'branch:{}:{}'.format(
1490 1505 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1491 1506 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1492 1507 )
1493 1508 elif repo.commit_ids:
1494 1509 # make the user select in this case
1495 1510 selected = None
1496 1511 else:
1497 1512 raise EmptyRepositoryError()
1498 1513 return groups, selected
1499 1514
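For illustration, a hedged sketch of the structure _get_repo_pullrequest_sources returns; the ref names and commit hashes are made up, but the key format follows the '{type}:{name}:{id}' pattern built above.

    groups = [
        ([(u'book:stable:f3d1a2b4c5d6', u'stable')], u'Bookmarks'),
        ([(u'branch:default:9a8b7c6d5e4f', u'default'),
          (u'branch:feature-x:1a2b3c4d5e6f', u'feature-x')], u'Branches'),
    ]
    selected = u'branch:default:9a8b7c6d5e4f'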
1500 1515 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1501 1516 hide_whitespace_changes, diff_context):
1502 1517
1503 1518 return self._get_diff_from_pr_or_version(
1504 1519 source_repo, source_ref_id, target_ref_id,
1505 1520 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1506 1521
1507 1522 def _get_diff_from_pr_or_version(
1508 1523 self, source_repo, source_ref_id, target_ref_id,
1509 1524 hide_whitespace_changes, diff_context):
1510 1525
1511 1526 target_commit = source_repo.get_commit(
1512 1527 commit_id=safe_str(target_ref_id))
1513 1528 source_commit = source_repo.get_commit(
1514 1529 commit_id=safe_str(source_ref_id))
1515 1530 if isinstance(source_repo, Repository):
1516 1531 vcs_repo = source_repo.scm_instance()
1517 1532 else:
1518 1533 vcs_repo = source_repo
1519 1534
1520 1535 # TODO: johbo: In the context of an update, we cannot reach
1521 1536 # the old commit anymore with our normal mechanisms. It needs
1522 1537 # some sort of special support in the vcs layer to avoid this
1523 1538 # workaround.
1524 1539 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1525 1540 vcs_repo.alias == 'git'):
1526 1541 source_commit.raw_id = safe_str(source_ref_id)
1527 1542
1528 1543 log.debug('calculating diff between '
1529 1544 'source_ref:%s and target_ref:%s for repo `%s`',
1530 1545 source_ref_id, target_ref_id,
1531 1546 safe_unicode(vcs_repo.path))
1532 1547
1533 1548 vcs_diff = vcs_repo.get_diff(
1534 1549 commit1=target_commit, commit2=source_commit,
1535 1550 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1536 1551 return vcs_diff
1537 1552
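A hedged usage sketch of get_diff, assuming it lives on PullRequestModel (as the calls further down suggest) and that the pull_request object exposes source_repo and *_ref_parts.commit_id, which are not shown in this hunk.

    diff = PullRequestModel().get_diff(
        pull_request.source_repo,
        source_ref_id=pull_request.source_ref_parts.commit_id,
        target_ref_id=pull_request.target_ref_parts.commit_id,
        hide_whitespace_changes=False,
        diff_context=3)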
1538 1553 def _is_merge_enabled(self, pull_request):
1539 1554 return self._get_general_setting(
1540 1555 pull_request, 'rhodecode_pr_merge_enabled')
1541 1556
1542 1557 def _use_rebase_for_merging(self, pull_request):
1543 1558 repo_type = pull_request.target_repo.repo_type
1544 1559 if repo_type == 'hg':
1545 1560 return self._get_general_setting(
1546 1561 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1547 1562 elif repo_type == 'git':
1548 1563 return self._get_general_setting(
1549 1564 pull_request, 'rhodecode_git_use_rebase_for_merging')
1550 1565
1551 1566 return False
1552 1567
1553 1568 def _close_branch_before_merging(self, pull_request):
1554 1569 repo_type = pull_request.target_repo.repo_type
1555 1570 if repo_type == 'hg':
1556 1571 return self._get_general_setting(
1557 1572 pull_request, 'rhodecode_hg_close_branch_before_merging')
1558 1573 elif repo_type == 'git':
1559 1574 return self._get_general_setting(
1560 1575 pull_request, 'rhodecode_git_close_branch_before_merging')
1561 1576
1562 1577 return False
1563 1578
1564 1579 def _get_general_setting(self, pull_request, settings_key, default=False):
1565 1580 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1566 1581 settings = settings_model.get_general_settings()
1567 1582 return settings.get(settings_key, default)
1568 1583
1569 1584 def _log_audit_action(self, action, action_data, user, pull_request):
1570 1585 audit_logger.store(
1571 1586 action=action,
1572 1587 action_data=action_data,
1573 1588 user=user,
1574 1589 repo=pull_request.target_repo)
1575 1590
1576 1591 def get_reviewer_functions(self):
1577 1592 """
1578 1593 Fetches functions for validation and fetching default reviewers.
1579 1594 If available, we use the EE package; otherwise we fall back to the CE
1580 1595 package functions.
1581 1596 """
1582 1597 try:
1583 1598 from rc_reviewers.utils import get_default_reviewers_data
1584 1599 from rc_reviewers.utils import validate_default_reviewers
1585 1600 except ImportError:
1586 1601 from rhodecode.apps.repository.utils import get_default_reviewers_data
1587 1602 from rhodecode.apps.repository.utils import validate_default_reviewers
1588 1603
1589 1604 return get_default_reviewers_data, validate_default_reviewers
1590 1605
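A minimal usage sketch: the method simply returns the two callables, so callers unpack them; their signatures are defined in the EE/CE utils modules and are not shown here.

    get_default_reviewers_data, validate_default_reviewers = \
        PullRequestModel().get_reviewer_functions()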
1591 1606
1592 1607 class MergeCheck(object):
1593 1608 """
1594 1609 Performs merge checks and returns a check object which stores information
1595 1610 about merge errors and merge conditions.
1596 1611 """
1597 1612 TODO_CHECK = 'todo'
1598 1613 PERM_CHECK = 'perm'
1599 1614 REVIEW_CHECK = 'review'
1600 1615 MERGE_CHECK = 'merge'
1601 1616
1602 1617 def __init__(self):
1603 1618 self.review_status = None
1604 1619 self.merge_possible = None
1605 1620 self.merge_msg = ''
1606 1621 self.failed = None
1607 1622 self.errors = []
1608 1623 self.error_details = OrderedDict()
1609 1624
1610 1625 def push_error(self, error_type, message, error_key, details):
1611 1626 self.failed = True
1612 1627 self.errors.append([error_type, message])
1613 1628 self.error_details[error_key] = dict(
1614 1629 details=details,
1615 1630 error_type=error_type,
1616 1631 message=message
1617 1632 )
1618 1633
1619 1634 @classmethod
1620 1635 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1621 1636 force_shadow_repo_refresh=False):
1622 1637 _ = translator
1623 1638 merge_check = cls()
1624 1639
1625 1640 # permissions to merge
1626 1641 user_allowed_to_merge = PullRequestModel().check_user_merge(
1627 1642 pull_request, auth_user)
1628 1643 if not user_allowed_to_merge:
1629 1644 log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
1630 1645
1631 1646 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1632 1647 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1633 1648 if fail_early:
1634 1649 return merge_check
1635 1650
1636 1651 # permission to merge into the target branch
1637 1652 target_commit_id = pull_request.target_ref_parts.commit_id
1638 1653 if pull_request.target_ref_parts.type == 'branch':
1639 1654 branch_name = pull_request.target_ref_parts.name
1640 1655 else:
1641 1656 # for mercurial we can always figure out the branch from the
1642 1657 # commit, e.g. when the target ref is a bookmark
1643 1658 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1644 1659 branch_name = target_commit.branch
1645 1660
1646 1661 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1647 1662 pull_request.target_repo.repo_name, branch_name)
1648 1663 if branch_perm and branch_perm == 'branch.none':
1649 1664 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1650 1665 branch_name, rule)
1651 1666 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1652 1667 if fail_early:
1653 1668 return merge_check
1654 1669
1655 1670 # review status, must always be present
1656 1671 review_status = pull_request.calculated_review_status()
1657 1672 merge_check.review_status = review_status
1658 1673
1659 1674 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1660 1675 if not status_approved:
1661 1676 log.debug("MergeCheck: cannot merge, approval is pending.")
1662 1677
1663 1678 msg = _('Pull request reviewer approval is pending.')
1664 1679
1665 1680 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1666 1681
1667 1682 if fail_early:
1668 1683 return merge_check
1669 1684
1670 1685 # left over TODOs
1671 1686 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1672 1687 if todos:
1673 1688 log.debug("MergeCheck: cannot merge, {} "
1674 1689 "unresolved TODOs left.".format(len(todos)))
1675 1690
1676 1691 if len(todos) == 1:
1677 1692 msg = _('Cannot merge, {} TODO still not resolved.').format(
1678 1693 len(todos))
1679 1694 else:
1680 1695 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1681 1696 len(todos))
1682 1697
1683 1698 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1684 1699
1685 1700 if fail_early:
1686 1701 return merge_check
1687 1702
1688 1703 # merge possibility check, done via filesystem simulation in the shadow repo
1689 1704 merge_status, msg = PullRequestModel().merge_status(
1690 1705 pull_request, translator=translator,
1691 1706 force_shadow_repo_refresh=force_shadow_repo_refresh)
1692 1707 merge_check.merge_possible = merge_status
1693 1708 merge_check.merge_msg = msg
1694 1709 if not merge_status:
1695 1710 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1696 1711 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1697 1712
1698 1713 if fail_early:
1699 1714 return merge_check
1700 1715
1701 1716 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1702 1717 return merge_check
1703 1718
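A hedged usage sketch of MergeCheck.validate, assuming a translator `_` and an `auth_user` are in scope; the error entries are the [error_type, message] pairs pushed above.

    merge_check = MergeCheck.validate(
        pull_request, auth_user=auth_user, translator=_, fail_early=True)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.debug('merge check failed (%s): %s', error_type, message)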
1704 1719 @classmethod
1705 1720 def get_merge_conditions(cls, pull_request, translator):
1706 1721 _ = translator
1707 1722 merge_details = {}
1708 1723
1709 1724 model = PullRequestModel()
1710 1725 use_rebase = model._use_rebase_for_merging(pull_request)
1711 1726
1712 1727 if use_rebase:
1713 1728 merge_details['merge_strategy'] = dict(
1714 1729 details={},
1715 1730 message=_('Merge strategy: rebase')
1716 1731 )
1717 1732 else:
1718 1733 merge_details['merge_strategy'] = dict(
1719 1734 details={},
1720 1735 message=_('Merge strategy: explicit merge commit')
1721 1736 )
1722 1737
1723 1738 close_branch = model._close_branch_before_merging(pull_request)
1724 1739 if close_branch:
1725 1740 repo_type = pull_request.target_repo.repo_type
1726 1741 close_msg = ''
1727 1742 if repo_type == 'hg':
1728 1743 close_msg = _('Source branch will be closed after merge.')
1729 1744 elif repo_type == 'git':
1730 1745 close_msg = _('Source branch will be deleted after merge.')
1731 1746
1732 1747 merge_details['close_branch'] = dict(
1733 1748 details={},
1734 1749 message=close_msg
1735 1750 )
1736 1751
1737 1752 return merge_details
1738 1753
1739 1754
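For illustration, the shape of the dict get_merge_conditions builds for a Mercurial target with rebase and close-branch enabled; the messages match the translatable strings above.

    merge_details = {
        'merge_strategy': {'details': {}, 'message': u'Merge strategy: rebase'},
        'close_branch': {'details': {},
                         'message': u'Source branch will be closed after merge.'},
    }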
1740 1755 ChangeTuple = collections.namedtuple(
1741 1756 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1742 1757
1743 1758 FileChangeTuple = collections.namedtuple(
1744 1759 'FileChangeTuple', ['added', 'modified', 'removed'])
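A small, hypothetical example of the two tuples defined above; the counts and file names are made up.

    change = ChangeTuple(added=3, common=12, removed=1, total=16)
    file_changes = FileChangeTuple(
        added=['docs/usage.rst'], modified=['setup.py'], removed=[])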
@@ -1,121 +1,143 b''
1 1 <%inherit file="/base/base.mako"/>
2 2
3 3 <%def name="title()">
4 4 ${_('%s Pull Requests') % c.repo_name}
5 5 %if c.rhodecode_name:
6 6 &middot; ${h.branding(c.rhodecode_name)}
7 7 %endif
8 8 </%def>
9 9
10 10 <%def name="breadcrumbs_links()"></%def>
11 11
12 12 <%def name="menu_bar_nav()">
13 13 ${self.menu_items(active='repositories')}
14 14 </%def>
15 15
16 16
17 17 <%def name="menu_bar_subnav()">
18 18 ${self.repo_menu(active='showpullrequest')}
19 19 </%def>
20 20
21 21
22 22 <%def name="main()">
23 23
24 24 <div class="box">
25 25 <div class="title">
26 26 <ul class="button-links">
27 27 <li class="btn ${('active' if c.active=='open' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':0})}">${_('Opened')}</a></li>
28 28 <li class="btn ${('active' if c.active=='my' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':0,'my':1})}">${_('Opened by me')}</a></li>
29 29 <li class="btn ${('active' if c.active=='awaiting' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':0,'awaiting_review':1})}">${_('Awaiting review')}</a></li>
30 30 <li class="btn ${('active' if c.active=='awaiting_my' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':0,'awaiting_my_review':1})}">${_('Awaiting my review')}</a></li>
31 31 <li class="btn ${('active' if c.active=='closed' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':0,'closed':1})}">${_('Closed')}</a></li>
32 32 <li class="btn ${('active' if c.active=='source' else '')}"><a href="${h.route_path('pullrequest_show_all',repo_name=c.repo_name, _query={'source':1})}">${_('From this repo')}</a></li>
33 33 </ul>
34 34
35 35 <ul class="links">
36 36 % if c.rhodecode_user.username != h.DEFAULT_USER:
37 37 <li>
38 38 <span>
39 39 <a id="open_new_pull_request" class="btn btn-small btn-success" href="${h.route_path('pullrequest_new',repo_name=c.repo_name)}">
40 40 ${_('Open new Pull Request')}
41 41 </a>
42 42 </span>
43 43 </li>
44 44 % endif
45
46 <li>
47 <div class="grid-quick-filter">
48 <ul class="grid-filter-box">
49 <li class="grid-filter-box-icon">
50 <i class="icon-search"></i>
51 </li>
52 <li class="grid-filter-box-input">
53 <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" placeholder="${_('quick filter...')}" value=""/>
54 </li>
55 </ul>
56 </div>
57 </li>
58
45 59 </ul>
46 60
47 61 </div>
48 62
49 63 <div class="main-content-full-width">
50 64 <table id="pull_request_list_table" class="display"></table>
51 65 </div>
52 66
53 67 </div>
54 68
55 69 <script type="text/javascript">
56 70 $(document).ready(function() {
57
58 71 var $pullRequestListTable = $('#pull_request_list_table');
59 72
60 73 // object list
61 74 $pullRequestListTable.DataTable({
62 75 processing: true,
63 76 serverSide: true,
64 77 ajax: {
65 78 "url": "${h.route_path('pullrequest_show_all_data', repo_name=c.repo_name)}",
66 79 "data": function (d) {
67 80 d.source = "${c.source}";
68 81 d.closed = "${c.closed}";
69 82 d.my = "${c.my}";
70 83 d.awaiting_review = "${c.awaiting_review}";
71 84 d.awaiting_my_review = "${c.awaiting_my_review}";
72 85 }
73 86 },
74 87 dom: 'rtp',
75 88 pageLength: ${c.visual.dashboard_items},
76 89 order: [[ 1, "desc" ]],
77 90 columns: [
78 91 { data: {"_": "status",
79 92 "sort": "status"}, title: "", className: "td-status", orderable: false},
80 93 { data: {"_": "name",
81 94 "sort": "name_raw"}, title: "${_('Id')}", className: "td-componentname", "type": "num" },
82 95 { data: {"_": "title",
83 96 "sort": "title"}, title: "${_('Title')}", className: "td-description" },
84 97 { data: {"_": "author",
85 98 "sort": "author_raw"}, title: "${_('Author')}", className: "td-user", orderable: false },
86 99 { data: {"_": "comments",
87 100 "sort": "comments_raw"}, title: "", className: "td-comments", orderable: false},
88 101 { data: {"_": "updated_on",
89 102 "sort": "updated_on_raw"}, title: "${_('Last Update')}", className: "td-time" }
90 103 ],
91 104 language: {
92 105 paginate: DEFAULT_GRID_PAGINATION,
93 106 sProcessing: _gettext('loading...'),
94 107 emptyTable: _gettext("No pull requests available yet.")
95 108 },
96 109 "drawCallback": function( settings, json ) {
97 110 timeagoActivate();
98 111 tooltipActivate();
99 112 },
100 113 "createdRow": function ( row, data, index ) {
101 114 if (data['closed']) {
102 115 $(row).addClass('closed');
103 116 }
104 117 if (data['state'] !== 'created') {
105 118 $(row).addClass('state-' + data['state']);
106 119 }
107 120 }
108 121 });
109 122
110 123 $pullRequestListTable.on('xhr.dt', function(e, settings, json, xhr){
111 124 $pullRequestListTable.css('opacity', 1);
112 125 });
113 126
114 127 $pullRequestListTable.on('preXhr.dt', function(e, settings, data){
115 128 $pullRequestListTable.css('opacity', 0.3);
116 129 });
117 130
131 // filter
132 $('#q_filter').on('keyup',
133 $.debounce(250, function() {
134 $pullRequestListTable.DataTable().search(
135 $('#q_filter').val()
136 ).draw();
137 })
138 );
139
118 140 });
119 141
120 142 </script>
121 143 </%def>